diff --git a/.core_files.yaml b/.core_files.yaml
index 08cabb71164..4a11d5da27c 100644
--- a/.core_files.yaml
+++ b/.core_files.yaml
@@ -61,6 +61,7 @@ components: &components
   - homeassistant/components/auth/**
   - homeassistant/components/automation/**
   - homeassistant/components/backup/**
+  - homeassistant/components/blueprint/**
   - homeassistant/components/bluetooth/**
   - homeassistant/components/cloud/**
   - homeassistant/components/config/**
@@ -146,6 +147,7 @@ requirements: &requirements
   - homeassistant/package_constraints.txt
   - requirements*.txt
   - pyproject.toml
+  - script/licenses.py

any:
  - *base_platforms
diff --git a/.github/workflows/builder.yml b/.github/workflows/builder.yml
index d0edc631762..65ad0e240bc 100644
--- a/.github/workflows/builder.yml
+++ b/.github/workflows/builder.yml
@@ -69,7 +69,7 @@ jobs:
        run: find ./homeassistant/components/*/translations -name "*.json" | tar zcvf translations.tar.gz -T -

      - name: Upload translations
-        uses: actions/upload-artifact@v4.3.4
+        uses: actions/upload-artifact@v4.3.6
        with:
          name: translations
          path: translations.tar.gz
@@ -197,7 +197,7 @@ jobs:
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build base image
-        uses: home-assistant/builder@2024.03.5
+        uses: home-assistant/builder@2024.08.2
        with:
          args: |
            $BUILD_ARGS \
@@ -263,7 +263,7 @@ jobs:
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Build base image
-        uses: home-assistant/builder@2024.03.5
+        uses: home-assistant/builder@2024.08.2
        with:
          args: |
            $BUILD_ARGS \
@@ -323,7 +323,7 @@ jobs:
        uses: actions/checkout@v4.1.7

      - name: Install Cosign
-        uses: sigstore/cosign-installer@v3.5.0
+        uses: sigstore/cosign-installer@v3.6.0
        with:
          cosign-release: "v2.2.3"

@@ -482,3 +482,56 @@ jobs:
          export TWINE_PASSWORD="${{ secrets.TWINE_TOKEN }}"

          twine upload dist/* --skip-existing
+
+  hassfest-image:
+    name: Build and test hassfest image
+    runs-on: ubuntu-latest
+    permissions:
+      contents: read
+      packages: write
+      attestations: write
+      id-token: write
+    needs: ["init"]
+    if: github.repository_owner == 'home-assistant'
+    env:
+      HASSFEST_IMAGE_NAME: ghcr.io/home-assistant/hassfest
+      HASSFEST_IMAGE_TAG: ghcr.io/home-assistant/hassfest:${{ needs.init.outputs.version }}
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
+
+      - name: Login to GitHub Container Registry
+        uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
+        with:
+          registry: ghcr.io
+          username: ${{ github.repository_owner }}
+          password: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Build Docker image
+        uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
+        with:
+          context: . # So action will not pull the repository again
+          file: ./script/hassfest/docker/Dockerfile
+          load: true
+          tags: ${{ env.HASSFEST_IMAGE_TAG }}
+
+      - name: Run hassfest against core
+        run: docker run --rm -v ${{ github.workspace }}/homeassistant:/github/workspace/homeassistant ${{ env.HASSFEST_IMAGE_TAG }} --core-integrations-path=/github/workspace/homeassistant/components
+
+      - name: Push Docker image
+        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
+        id: push
+        uses: docker/build-push-action@5cd11c3a4ced054e52742c5fd54dca954e0edd85 # v6.7.0
+        with:
+          context: . # So action will not pull the repository again
+          file: ./script/hassfest/docker/Dockerfile
+          push: true
+          tags: ${{ env.HASSFEST_IMAGE_TAG }},${{ env.HASSFEST_IMAGE_NAME }}:latest
+
+      - name: Generate artifact attestation
+        if: needs.init.outputs.channel != 'dev' && needs.init.outputs.publish == 'true'
+        uses: actions/attest-build-provenance@6149ea5740be74af77f260b9db67e633f6b0a9a1 # v1.4.2
+        with:
+          subject-name: ${{ env.HASSFEST_IMAGE_NAME }}
+          subject-digest: ${{ steps.push.outputs.digest }}
+          push-to-registry: true
diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml
index b705064e078..f6ffa439d9b 100644
--- a/.github/workflows/ci.yaml
+++ b/.github/workflows/ci.yaml
@@ -31,12 +31,16 @@ on:
        description: "Only run mypy"
        default: false
        type: boolean
+      audit-licenses-only:
+        description: "Only run audit licenses"
+        default: false
+        type: boolean

env:
-  CACHE_VERSION: 9
+  CACHE_VERSION: 10
  UV_CACHE_VERSION: 1
  MYPY_CACHE_VERSION: 8
-  HA_SHORT_VERSION: "2024.8"
+  HA_SHORT_VERSION: "2024.9"
  DEFAULT_PYTHON: "3.12"
  ALL_PYTHON_VERSIONS: "['3.12']"
  # 10.3 is the oldest supported version
@@ -222,6 +226,7 @@ jobs:
    if: |
      github.event.inputs.pylint-only != 'true'
      && github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
    needs:
      - info
    steps:
@@ -343,6 +348,7 @@ jobs:
          pre-commit run --hook-stage manual ruff --all-files --show-diff-on-failure
        env:
          RUFF_OUTPUT_FORMAT: github
+
  lint-other:
    name: Check other linters
    runs-on: ubuntu-24.04
@@ -508,8 +514,7 @@ jobs:
          uv pip install -U "pip>=21.3.1" setuptools wheel
          uv pip install -r requirements.txt
          python -m script.gen_requirements_all ci
-          uv pip install -r requirements_all_pytest.txt
-          uv pip install -r requirements_test.txt
+          uv pip install -r requirements_all_pytest.txt -r requirements_test.txt
          uv pip install -e . --config-settings editable_mode=compat

  hassfest:
@@ -518,6 +523,7 @@ jobs:
    if: |
      github.event.inputs.pylint-only != 'true'
      && github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
    needs:
      - info
      - base
@@ -556,6 +562,7 @@ jobs:
    if: |
      github.event.inputs.pylint-only != 'true'
      && github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
    needs:
      - info
      - base
@@ -589,7 +596,10 @@ jobs:
      - info
      - base
    if: |
-      needs.info.outputs.requirements == 'true'
+      (github.event.inputs.pylint-only != 'true'
+      && github.event.inputs.mypy-only != 'true'
+      || github.event.inputs.audit-licenses-only == 'true')
+      && needs.info.outputs.requirements == 'true'
    steps:
      - name: Check out code from GitHub
        uses: actions/checkout@v4.1.7
@@ -613,7 +623,7 @@ jobs:
          . venv/bin/activate
          pip-licenses --format=json --output-file=licenses.json
      - name: Upload licenses
-        uses: actions/upload-artifact@v4.3.4
+        uses: actions/upload-artifact@v4.3.6
        with:
          name: licenses
          path: licenses.json
@@ -628,6 +638,7 @@ jobs:
    timeout-minutes: 20
    if: |
      github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
      || github.event.inputs.pylint-only == 'true'
    needs:
      - info
@@ -672,7 +683,9 @@ jobs:
    runs-on: ubuntu-24.04
    timeout-minutes: 20
    if: |
-      (github.event.inputs.mypy-only != 'true' || github.event.inputs.pylint-only == 'true')
+      (github.event.inputs.mypy-only != 'true'
+      && github.event.inputs.audit-licenses-only != 'true'
+      || github.event.inputs.pylint-only == 'true')
      && (needs.info.outputs.tests_glob || needs.info.outputs.test_full_suite == 'true')
    needs:
      - info
@@ -703,20 +716,21 @@ jobs:
        run: |
          .
venv/bin/activate python --version - pylint --ignore-missing-annotations=y tests + pylint tests - name: Run pylint (partially) if: needs.info.outputs.test_full_suite == 'false' shell: bash run: | . venv/bin/activate python --version - pylint --ignore-missing-annotations=y tests/components/${{ needs.info.outputs.tests_glob }} + pylint tests/components/${{ needs.info.outputs.tests_glob }} mypy: name: Check mypy runs-on: ubuntu-24.04 if: | github.event.inputs.pylint-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' || github.event.inputs.mypy-only == 'true' needs: - info @@ -781,6 +795,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.test_full_suite == 'true' needs: - info @@ -818,7 +833,7 @@ jobs: . venv/bin/activate python -m script.split_tests ${{ needs.info.outputs.test_group_count }} tests - name: Upload pytest_buckets - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: pytest_buckets path: pytest_buckets.txt @@ -831,6 +846,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.test_full_suite == 'true' needs: - info @@ -918,14 +934,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-full.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml @@ -951,6 +967,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.mariadb_groups != '[]' needs: - info @@ -1043,7 +1060,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${mariadb}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1051,7 +1068,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.mariadb }} @@ -1076,6 +1093,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.postgresql_groups != '[]' needs: - info @@ -1169,7 +1187,7 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${postgresql}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.4 + 
uses: actions/upload-artifact@v4.3.6 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1177,7 +1195,7 @@ jobs: overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: coverage-${{ matrix.python-version }}-${{ steps.pytest-partial.outputs.postgresql }} @@ -1220,6 +1238,7 @@ jobs: && github.event.inputs.lint-only != 'true' && github.event.inputs.pylint-only != 'true' && github.event.inputs.mypy-only != 'true' + && github.event.inputs.audit-licenses-only != 'true' && needs.info.outputs.tests_glob && needs.info.outputs.test_full_suite == 'false' needs: @@ -1310,14 +1329,14 @@ jobs: 2>&1 | tee pytest-${{ matrix.python-version }}-${{ matrix.group }}.txt - name: Upload pytest output if: success() || failure() && steps.pytest-partial.conclusion == 'failure' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: pytest-${{ github.run_number }}-${{ matrix.python-version }}-${{ matrix.group }} path: pytest-*.txt overwrite: true - name: Upload coverage artifact if: needs.info.outputs.skip_coverage != 'true' - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: coverage-${{ matrix.python-version }}-${{ matrix.group }} path: coverage.xml diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 7fe545e469c..a4653a833c4 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -24,11 +24,11 @@ jobs: uses: actions/checkout@v4.1.7 - name: Initialize CodeQL - uses: github/codeql-action/init@v3.25.15 + uses: github/codeql-action/init@v3.26.5 with: languages: python - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3.25.15 + uses: github/codeql-action/analyze@v3.26.5 with: category: "/language:python" diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index b74406b9c82..694208d30ac 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -82,14 +82,14 @@ jobs: ) > .env_file - name: Upload env_file - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: env_file path: ./.env_file overwrite: true - name: Upload requirements_diff - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: requirements_diff path: ./requirements_diff.txt @@ -101,7 +101,7 @@ jobs: python -m script.gen_requirements_all ci - name: Upload requirements_all_wheels - uses: actions/upload-artifact@v4.3.4 + uses: actions/upload-artifact@v4.3.6 with: name: requirements_all_wheels path: ./requirements_all_wheels_*.txt @@ -211,7 +211,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_old-cython.txt" @@ -226,7 +226,7 @@ jobs: wheels-key: ${{ 
secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtaa" @@ -240,7 +240,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtab" @@ -254,7 +254,7 @@ jobs: wheels-key: ${{ secrets.WHEELS_KEY }} env-file: true apk: "bluez-dev;libffi-dev;openssl-dev;glib-dev;eudev-dev;libxml2-dev;libxslt-dev;libpng-dev;libjpeg-turbo-dev;tiff-dev;cups-dev;gmp-dev;mpfr-dev;mpc1-dev;ffmpeg-dev;gammu-dev;yaml-dev;openblas-dev;fftw-dev;lapack-dev;gfortran;blas-dev;eigen-dev;freetype-dev;glew-dev;harfbuzz-dev;hdf5-dev;libdc1394-dev;libtbb-dev;mesa-dev;openexr-dev;openjpeg-dev;uchardet-dev;nasm" - skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic + skip-binary: aiohttp;charset-normalizer;grpcio;SQLAlchemy;protobuf;pydantic;pymicro-vad constraints: "homeassistant/package_constraints.txt" requirements-diff: "requirements_diff.txt" requirements: "requirements_all.txtac" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 22e10d420d4..ab5e59139cf 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,6 +1,6 @@ repos: - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.5.5 + rev: v0.6.2 hooks: - id: ruff args: @@ -12,7 +12,7 @@ repos: hooks: - id: codespell args: - - --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn,pres,ser,ue + - --ignore-words-list=astroid,checkin,currenty,hass,iif,incomfort,lookin,nam,NotIn - --skip="./.*,*.csv,*.json,*.ambr" - --quiet-level=2 exclude_types: [csv, json, html] diff --git a/.strict-typing b/.strict-typing index 02d9968d247..2566a5349c2 100644 --- a/.strict-typing +++ b/.strict-typing @@ -95,8 +95,6 @@ homeassistant.components.aruba.* homeassistant.components.arwn.* homeassistant.components.aseko_pool_live.* homeassistant.components.assist_pipeline.* -homeassistant.components.asterisk_cdr.* -homeassistant.components.asterisk_mbox.* homeassistant.components.asuswrt.* homeassistant.components.autarco.* homeassistant.components.auth.* @@ -198,7 +196,9 @@ homeassistant.components.fritzbox.* homeassistant.components.fritzbox_callmonitor.* homeassistant.components.fronius.* homeassistant.components.frontend.* +homeassistant.components.fujitsu_fglair.* 
homeassistant.components.fully_kiosk.* +homeassistant.components.fyta.* homeassistant.components.generic_hygrostat.* homeassistant.components.generic_thermostat.* homeassistant.components.geo_location.* @@ -294,7 +294,7 @@ homeassistant.components.london_underground.* homeassistant.components.lookin.* homeassistant.components.luftdaten.* homeassistant.components.madvr.* -homeassistant.components.mailbox.* +homeassistant.components.manual.* homeassistant.components.map.* homeassistant.components.mastodon.* homeassistant.components.matrix.* diff --git a/CODEOWNERS b/CODEOWNERS index e90def993d2..6f118ca1ba8 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -108,6 +108,8 @@ build.json @home-assistant/supervisor /tests/components/anova/ @Lash-L /homeassistant/components/anthemav/ @hyralex /tests/components/anthemav/ @hyralex +/homeassistant/components/anthropic/ @Shulyaka +/tests/components/anthropic/ @Shulyaka /homeassistant/components/aosmith/ @bdr99 /tests/components/aosmith/ @bdr99 /homeassistant/components/apache_kafka/ @bachya @@ -347,8 +349,8 @@ build.json @home-assistant/supervisor /tests/components/dremel_3d_printer/ @tkdrob /homeassistant/components/drop_connect/ @ChandlerSystems @pfrazer /tests/components/drop_connect/ @ChandlerSystems @pfrazer -/homeassistant/components/dsmr/ @Robbie1221 @frenck -/tests/components/dsmr/ @Robbie1221 @frenck +/homeassistant/components/dsmr/ @Robbie1221 +/tests/components/dsmr/ @Robbie1221 /homeassistant/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna /tests/components/dsmr_reader/ @sorted-bits @glodenox @erwindouna /homeassistant/components/duotecno/ @cereal2nd @@ -431,6 +433,7 @@ build.json @home-assistant/supervisor /homeassistant/components/evil_genius_labs/ @balloob /tests/components/evil_genius_labs/ @balloob /homeassistant/components/evohome/ @zxdavb +/tests/components/evohome/ @zxdavb /homeassistant/components/ezviz/ @RenierM26 @baqs /tests/components/ezviz/ @RenierM26 @baqs /homeassistant/components/faa_delays/ @ntilley905 @@ -496,6 +499,8 @@ build.json @home-assistant/supervisor /tests/components/frontend/ @home-assistant/frontend /homeassistant/components/frontier_silicon/ @wlcrs /tests/components/frontier_silicon/ @wlcrs +/homeassistant/components/fujitsu_fglair/ @crevetor +/tests/components/fujitsu_fglair/ @crevetor /homeassistant/components/fully_kiosk/ @cgarwood /tests/components/fully_kiosk/ @cgarwood /homeassistant/components/fyta/ @dontinelli @@ -823,8 +828,6 @@ build.json @home-assistant/supervisor /tests/components/logbook/ @home-assistant/core /homeassistant/components/logger/ @home-assistant/core /tests/components/logger/ @home-assistant/core -/homeassistant/components/logi_circle/ @evanjd -/tests/components/logi_circle/ @evanjd /homeassistant/components/london_underground/ @jpbede /tests/components/london_underground/ @jpbede /homeassistant/components/lookin/ @ANMalko @bdraco @@ -967,6 +970,8 @@ build.json @home-assistant/supervisor /tests/components/nfandroidtv/ @tkdrob /homeassistant/components/nibe_heatpump/ @elupus /tests/components/nibe_heatpump/ @elupus +/homeassistant/components/nice_go/ @IceBotYT +/tests/components/nice_go/ @IceBotYT /homeassistant/components/nightscout/ @marciogranzotto /tests/components/nightscout/ @marciogranzotto /homeassistant/components/nilu/ @hfurubotten @@ -1324,6 +1329,8 @@ build.json @home-assistant/supervisor /homeassistant/components/smarty/ @z0mbieprocess /homeassistant/components/smhi/ @gjohansson-ST /tests/components/smhi/ @gjohansson-ST +/homeassistant/components/smlight/ @tl-sl 
+/tests/components/smlight/ @tl-sl /homeassistant/components/sms/ @ocalvo /tests/components/sms/ @ocalvo /homeassistant/components/snapcast/ @luar123 @@ -1486,6 +1493,8 @@ build.json @home-assistant/supervisor /tests/components/tomorrowio/ @raman325 @lymanepp /homeassistant/components/totalconnect/ @austinmroczek /tests/components/totalconnect/ @austinmroczek +/homeassistant/components/touchline_sl/ @jnsgruk +/tests/components/touchline_sl/ @jnsgruk /homeassistant/components/tplink/ @rytilahti @bdraco @sdb9696 /tests/components/tplink/ @rytilahti @bdraco @sdb9696 /homeassistant/components/tplink_omada/ @MarkGodwin @@ -1651,6 +1660,8 @@ build.json @home-assistant/supervisor /tests/components/xiaomi_miio/ @rytilahti @syssi @starkillerOG /homeassistant/components/xiaomi_tv/ @simse /homeassistant/components/xmpp/ @fabaff @flowolf +/homeassistant/components/yale/ @bdraco +/tests/components/yale/ @bdraco /homeassistant/components/yale_smart_alarm/ @gjohansson-ST /tests/components/yale_smart_alarm/ @gjohansson-ST /homeassistant/components/yalexs_ble/ @bdraco diff --git a/Dockerfile.dev b/Dockerfile.dev index d7a2f2b7bf9..d05c6df425c 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -42,7 +42,8 @@ WORKDIR /usr/src # Setup hass-release RUN git clone --depth 1 https://github.com/home-assistant/hass-release \ - && uv pip install --system -e hass-release/ + && uv pip install --system -e hass-release/ \ + && chown -R vscode /usr/src/hass-release/data USER vscode ENV VIRTUAL_ENV="/home/vscode/.local/ha-venv" diff --git a/homeassistant/block_async_io.py b/homeassistant/block_async_io.py index 5b8ba535b5a..7a68b2515e9 100644 --- a/homeassistant/block_async_io.py +++ b/homeassistant/block_async_io.py @@ -8,6 +8,8 @@ import glob from http.client import HTTPConnection import importlib import os +from pathlib import Path +from ssl import SSLContext import sys import threading import time @@ -143,6 +145,78 @@ _BLOCKING_CALLS: tuple[BlockingCall, ...] 
= ( strict_core=False, skip_for_tests=True, ), + BlockingCall( + original_func=SSLContext.load_default_certs, + object=SSLContext, + function="load_default_certs", + check_allowed=None, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=SSLContext.load_verify_locations, + object=SSLContext, + function="load_verify_locations", + check_allowed=None, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=SSLContext.load_cert_chain, + object=SSLContext, + function="load_cert_chain", + check_allowed=None, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.open, + object=Path, + function="open", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.read_text, + object=Path, + function="read_text", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.read_bytes, + object=Path, + function="read_bytes", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.write_text, + object=Path, + function="write_text", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), + BlockingCall( + original_func=Path.write_bytes, + object=Path, + function="write_bytes", + check_allowed=_check_file_allowed, + strict=False, + strict_core=False, + skip_for_tests=True, + ), ) diff --git a/homeassistant/bootstrap.py b/homeassistant/bootstrap.py index 43f4d451497..742a293e4c4 100644 --- a/homeassistant/bootstrap.py +++ b/homeassistant/bootstrap.py @@ -586,10 +586,10 @@ async def async_enable_logging( logging.getLogger("aiohttp.access").setLevel(logging.WARNING) logging.getLogger("httpx").setLevel(logging.WARNING) - sys.excepthook = lambda *args: logging.getLogger(None).exception( + sys.excepthook = lambda *args: logging.getLogger().exception( "Uncaught exception", exc_info=args ) - threading.excepthook = lambda args: logging.getLogger(None).exception( + threading.excepthook = lambda args: logging.getLogger().exception( "Uncaught thread exception", exc_info=( # type: ignore[arg-type] args.exc_type, @@ -616,10 +616,9 @@ async def async_enable_logging( _create_log_file, err_log_path, log_rotate_days ) - err_handler.setLevel(logging.INFO if verbose else logging.WARNING) err_handler.setFormatter(logging.Formatter(fmt, datefmt=FORMAT_DATETIME)) - logger = logging.getLogger("") + logger = logging.getLogger() logger.addHandler(err_handler) logger.setLevel(logging.INFO if verbose else logging.WARNING) diff --git a/homeassistant/brands/asterisk.json b/homeassistant/brands/asterisk.json deleted file mode 100644 index 1df3e660afe..00000000000 --- a/homeassistant/brands/asterisk.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "domain": "asterisk", - "name": "Asterisk", - "integrations": ["asterisk_cdr", "asterisk_mbox"] -} diff --git a/homeassistant/brands/fujitsu.json b/homeassistant/brands/fujitsu.json new file mode 100644 index 00000000000..75d12e33851 --- /dev/null +++ b/homeassistant/brands/fujitsu.json @@ -0,0 +1,5 @@ +{ + "domain": "fujitsu", + "name": "Fujitsu", + "integrations": ["fujitsu_anywair", "fujitsu_fglair"] +} diff --git a/homeassistant/brands/roth.json b/homeassistant/brands/roth.json new file mode 100644 index 00000000000..21542b5b641 --- /dev/null +++ b/homeassistant/brands/roth.json @@ 
-0,0 +1,5 @@ +{ + "domain": "roth", + "name": "Roth", + "integrations": ["touchline", "touchline_sl"] +} diff --git a/homeassistant/brands/yale.json b/homeassistant/brands/yale.json index 53dc9b43569..a0e7c6bd453 100644 --- a/homeassistant/brands/yale.json +++ b/homeassistant/brands/yale.json @@ -1,5 +1,11 @@ { "domain": "yale", "name": "Yale", - "integrations": ["august", "yale_smart_alarm", "yalexs_ble", "yale_home"] + "integrations": [ + "august", + "yale_smart_alarm", + "yalexs_ble", + "yale_home", + "yale" + ] } diff --git a/homeassistant/components/abode/icons.json b/homeassistant/components/abode/icons.json index 00175628d9a..4ce4e55cab6 100644 --- a/homeassistant/components/abode/icons.json +++ b/homeassistant/components/abode/icons.json @@ -7,8 +7,14 @@ } }, "services": { - "capture_image": "mdi:camera", - "change_setting": "mdi:cog", - "trigger_automation": "mdi:play" + "capture_image": { + "service": "mdi:camera" + }, + "change_setting": { + "service": "mdi:cog" + }, + "trigger_automation": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/acer_projector/switch.py b/homeassistant/components/acer_projector/switch.py index 5c1c37df5d8..c1463cd9a08 100644 --- a/homeassistant/components/acer_projector/switch.py +++ b/homeassistant/components/acer_projector/switch.py @@ -81,7 +81,7 @@ class AcerSwitch(SwitchEntity): write_timeout: int, ) -> None: """Init of the Acer projector.""" - self.ser = serial.Serial( + self.serial = serial.Serial( port=serial_port, timeout=timeout, write_timeout=write_timeout ) self._serial_port = serial_port @@ -99,16 +99,16 @@ class AcerSwitch(SwitchEntity): # was disconnected during runtime. # This way the projector can be reconnected and will still work try: - if not self.ser.is_open: - self.ser.open() - self.ser.write(msg.encode("utf-8")) + if not self.serial.is_open: + self.serial.open() + self.serial.write(msg.encode("utf-8")) # Size is an experience value there is no real limit. 
# AFAIK there is no limit and no end character so we will usually # need to wait for timeout - ret = self.ser.read_until(size=20).decode("utf-8") + ret = self.serial.read_until(size=20).decode("utf-8") except serial.SerialException: _LOGGER.error("Problem communicating with %s", self._serial_port) - self.ser.close() + self.serial.close() return ret def _write_read_format(self, msg: str) -> str: diff --git a/homeassistant/components/acmeda/manifest.json b/homeassistant/components/acmeda/manifest.json index a8b3c7c829f..0c35904cac6 100644 --- a/homeassistant/components/acmeda/manifest.json +++ b/homeassistant/components/acmeda/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/acmeda", "iot_class": "local_push", "loggers": ["aiopulse"], - "requirements": ["aiopulse==0.4.4"] + "requirements": ["aiopulse==0.4.6"] } diff --git a/homeassistant/components/adguard/icons.json b/homeassistant/components/adguard/icons.json index 9c5df8a4a45..18527c0ed98 100644 --- a/homeassistant/components/adguard/icons.json +++ b/homeassistant/components/adguard/icons.json @@ -66,10 +66,20 @@ } }, "services": { - "add_url": "mdi:link-plus", - "remove_url": "mdi:link-off", - "enable_url": "mdi:link-variant", - "disable_url": "mdi:link-variant-off", - "refresh": "mdi:refresh" + "add_url": { + "service": "mdi:link-plus" + }, + "remove_url": { + "service": "mdi:link-off" + }, + "enable_url": { + "service": "mdi:link-variant" + }, + "disable_url": { + "service": "mdi:link-variant-off" + }, + "refresh": { + "service": "mdi:refresh" + } } } diff --git a/homeassistant/components/ads/__init__.py b/homeassistant/components/ads/__init__.py index 7041a757a42..f5742718b12 100644 --- a/homeassistant/components/ads/__init__.py +++ b/homeassistant/components/ads/__init__.py @@ -136,7 +136,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: # Tuple to hold data needed for notification -NotificationItem = namedtuple( +NotificationItem = namedtuple( # noqa: PYI024 "NotificationItem", "hnotify huser name plc_datatype callback" ) diff --git a/homeassistant/components/ads/icons.json b/homeassistant/components/ads/icons.json index 5ab8041fe9b..3732f16bf1a 100644 --- a/homeassistant/components/ads/icons.json +++ b/homeassistant/components/ads/icons.json @@ -1,5 +1,7 @@ { "services": { - "write_data_by_name": "mdi:pencil" + "write_data_by_name": { + "service": "mdi:pencil" + } } } diff --git a/homeassistant/components/advantage_air/icons.json b/homeassistant/components/advantage_air/icons.json index a4168f440cf..8651c9d9eaf 100644 --- a/homeassistant/components/advantage_air/icons.json +++ b/homeassistant/components/advantage_air/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_time_to": "mdi:timer-cog" + "set_time_to": { + "service": "mdi:timer-cog" + } } } diff --git a/homeassistant/components/aftership/icons.json b/homeassistant/components/aftership/icons.json index 1222ab0873d..105d3cef3ec 100644 --- a/homeassistant/components/aftership/icons.json +++ b/homeassistant/components/aftership/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "add_tracking": "mdi:package-variant-plus", - "remove_tracking": "mdi:package-variant-minus" + "add_tracking": { + "service": "mdi:package-variant-plus" + }, + "remove_tracking": { + "service": "mdi:package-variant-minus" + } } } diff --git a/homeassistant/components/agent_dvr/camera.py b/homeassistant/components/agent_dvr/camera.py index 4438bf72a1a..933d0c6b40b 100644 --- a/homeassistant/components/agent_dvr/camera.py +++ 
b/homeassistant/components/agent_dvr/camera.py @@ -59,7 +59,7 @@ async def async_setup_entry( platform = async_get_current_platform() for service, method in CAMERA_SERVICES.items(): - platform.async_register_entity_service(service, {}, method) + platform.async_register_entity_service(service, None, method) class AgentCamera(MjpegCamera): diff --git a/homeassistant/components/agent_dvr/icons.json b/homeassistant/components/agent_dvr/icons.json index 6550d01641e..7dfb4a847f6 100644 --- a/homeassistant/components/agent_dvr/icons.json +++ b/homeassistant/components/agent_dvr/icons.json @@ -1,9 +1,19 @@ { "services": { - "start_recording": "mdi:record-rec", - "stop_recording": "mdi:stop", - "enable_alerts": "mdi:bell-alert", - "disable_alerts": "mdi:bell-off", - "snapshot": "mdi:camera" + "start_recording": { + "service": "mdi:record-rec" + }, + "stop_recording": { + "service": "mdi:stop" + }, + "enable_alerts": { + "service": "mdi:bell-alert" + }, + "disable_alerts": { + "service": "mdi:bell-off" + }, + "snapshot": { + "service": "mdi:camera" + } } } diff --git a/homeassistant/components/airgradient/__init__.py b/homeassistant/components/airgradient/__init__.py index 69f1e70c6af..7ee8ac6a3c7 100644 --- a/homeassistant/components/airgradient/__init__.py +++ b/homeassistant/components/airgradient/__init__.py @@ -21,6 +21,7 @@ PLATFORMS: list[Platform] = [ Platform.SELECT, Platform.SENSOR, Platform.SWITCH, + Platform.UPDATE, ] diff --git a/homeassistant/components/airgradient/update.py b/homeassistant/components/airgradient/update.py new file mode 100644 index 00000000000..95e64930ea6 --- /dev/null +++ b/homeassistant/components/airgradient/update.py @@ -0,0 +1,55 @@ +"""Airgradient Update platform.""" + +from datetime import timedelta +from functools import cached_property + +from homeassistant.components.update import UpdateDeviceClass, UpdateEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import AirGradientConfigEntry, AirGradientMeasurementCoordinator +from .entity import AirGradientEntity + +SCAN_INTERVAL = timedelta(hours=1) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: AirGradientConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Airgradient update platform.""" + + data = config_entry.runtime_data + + async_add_entities([AirGradientUpdate(data.measurement)], True) + + +class AirGradientUpdate(AirGradientEntity, UpdateEntity): + """Representation of Airgradient Update.""" + + _attr_device_class = UpdateDeviceClass.FIRMWARE + coordinator: AirGradientMeasurementCoordinator + + def __init__(self, coordinator: AirGradientMeasurementCoordinator) -> None: + """Initialize the entity.""" + super().__init__(coordinator) + self._attr_unique_id = f"{coordinator.serial_number}-update" + + @cached_property + def should_poll(self) -> bool: + """Return True because we need to poll the latest version.""" + return True + + @property + def installed_version(self) -> str: + """Return the installed version of the entity.""" + return self.coordinator.data.firmware_version + + async def async_update(self) -> None: + """Update the entity.""" + self._attr_latest_version = ( + await self.coordinator.client.get_latest_firmware_version( + self.coordinator.serial_number + ) + ) diff --git a/homeassistant/components/airthings_ble/manifest.json b/homeassistant/components/airthings_ble/manifest.json index b86bc314819..6c00fe79e7b 100644 --- a/homeassistant/components/airthings_ble/manifest.json +++ b/homeassistant/components/airthings_ble/manifest.json @@ -24,5 +24,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/airthings_ble", "iot_class": "local_polling", - "requirements": ["airthings-ble==0.9.0"] + "requirements": ["airthings-ble==0.9.1"] } diff --git a/homeassistant/components/airtouch4/config_flow.py b/homeassistant/components/airtouch4/config_flow.py index 12e01ffde29..02bb5cc3ad0 100644 --- a/homeassistant/components/airtouch4/config_flow.py +++ b/homeassistant/components/airtouch4/config_flow.py @@ -1,9 +1,11 @@ """Config flow for AirTouch4.""" +from typing import Any + from airtouch4pyapi import AirTouch, AirTouchStatus import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from .const import DOMAIN @@ -16,7 +18,9 @@ class AirtouchConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if user_input is None: return self.async_show_form(step_id="user", data_schema=DATA_SCHEMA) diff --git a/homeassistant/components/airvisual/__init__.py b/homeassistant/components/airvisual/__init__.py index 4d0563ddce8..60fdbf12ca1 100644 --- a/homeassistant/components/airvisual/__init__.py +++ b/homeassistant/components/airvisual/__init__.py @@ -31,7 +31,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers import ( aiohttp_client, - config_validation as cv, device_registry as dr, entity_registry as er, ) @@ -62,8 +61,6 @@ PLATFORMS = [Platform.SENSOR] DEFAULT_ATTRIBUTION = "Data provided by AirVisual" -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - @callback def 
async_get_cloud_api_update_interval( diff --git a/homeassistant/components/airvisual_pro/config_flow.py b/homeassistant/components/airvisual_pro/config_flow.py index ebdbc807b18..db83411b4a4 100644 --- a/homeassistant/components/airvisual_pro/config_flow.py +++ b/homeassistant/components/airvisual_pro/config_flow.py @@ -80,11 +80,9 @@ class AirVisualProFlowHandler(ConfigFlow, domain=DOMAIN): """Initialize.""" self._reauth_entry: ConfigEntry | None = None - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: + """Import a config entry from `airvisual` integration (see #83882).""" + return await self.async_step_user(import_data) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/airzone/manifest.json b/homeassistant/components/airzone/manifest.json index 0c32787d8ae..31ff7423ad6 100644 --- a/homeassistant/components/airzone/manifest.json +++ b/homeassistant/components/airzone/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/airzone", "iot_class": "local_polling", "loggers": ["aioairzone"], - "requirements": ["aioairzone==0.8.1"] + "requirements": ["aioairzone==0.8.2"] } diff --git a/homeassistant/components/airzone/select.py b/homeassistant/components/airzone/select.py index 493150e5c6a..2bc11bc4228 100644 --- a/homeassistant/components/airzone/select.py +++ b/homeassistant/components/airzone/select.py @@ -2,16 +2,21 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass from typing import Any, Final -from aioairzone.common import GrilleAngle, SleepTimeout +from aioairzone.common import GrilleAngle, OperationMode, SleepTimeout from aioairzone.const import ( API_COLD_ANGLE, API_HEAT_ANGLE, + API_MODE, API_SLEEP, AZD_COLD_ANGLE, AZD_HEAT_ANGLE, + AZD_MASTER, + AZD_MODE, + AZD_MODES, AZD_SLEEP, AZD_ZONES, ) @@ -33,6 +38,9 @@ class AirzoneSelectDescription(SelectEntityDescription): api_param: str options_dict: dict[str, int] + options_fn: Callable[[dict[str, Any], dict[str, int]], list[str]] = ( + lambda zone_data, value: list(value) + ) GRILLE_ANGLE_DICT: Final[dict[str, int]] = { @@ -42,6 +50,15 @@ GRILLE_ANGLE_DICT: Final[dict[str, int]] = { "40deg": GrilleAngle.DEG_40, } +MODE_DICT: Final[dict[str, int]] = { + "cool": OperationMode.COOLING, + "dry": OperationMode.DRY, + "fan": OperationMode.FAN, + "heat": OperationMode.HEATING, + "heat_cool": OperationMode.AUTO, + "stop": OperationMode.STOP, +} + SLEEP_DICT: Final[dict[str, int]] = { "off": SleepTimeout.SLEEP_OFF, "30m": SleepTimeout.SLEEP_30, @@ -50,6 +67,26 @@ SLEEP_DICT: Final[dict[str, int]] = { } +def main_zone_options( + zone_data: dict[str, Any], + options: dict[str, int], +) -> list[str]: + """Filter available modes.""" + modes = zone_data.get(AZD_MODES, []) + return [k for k, v in options.items() if v in modes] + + +MAIN_ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( + AirzoneSelectDescription( + api_param=API_MODE, + key=AZD_MODE, + options_dict=MODE_DICT, + options_fn=main_zone_options, + translation_key="modes", + ), +) + + ZONE_SELECT_TYPES: Final[tuple[AirzoneSelectDescription, ...]] = ( AirzoneSelectDescription( api_param=API_COLD_ANGLE, @@ -95,7 +132,20 @@ async def async_setup_entry( received_zones = set(zones_data) new_zones = 
received_zones - added_zones if new_zones: - async_add_entities( + entities: list[AirzoneZoneSelect] = [ + AirzoneZoneSelect( + coordinator, + description, + entry, + system_zone_id, + zones_data.get(system_zone_id), + ) + for system_zone_id in new_zones + for description in MAIN_ZONE_SELECT_TYPES + if description.key in zones_data.get(system_zone_id) + and zones_data.get(system_zone_id).get(AZD_MASTER) is True + ] + entities += [ AirzoneZoneSelect( coordinator, description, @@ -106,7 +156,8 @@ async def async_setup_entry( for system_zone_id in new_zones for description in ZONE_SELECT_TYPES if description.key in zones_data.get(system_zone_id) - ) + ] + async_add_entities(entities) added_zones.update(new_zones) entry.async_on_unload(coordinator.async_add_listener(_async_entity_listener)) @@ -153,6 +204,11 @@ class AirzoneZoneSelect(AirzoneZoneEntity, AirzoneBaseSelect): f"{self._attr_unique_id}_{system_zone_id}_{description.key}" ) self.entity_description = description + + self._attr_options = self.entity_description.options_fn( + zone_data, description.options_dict + ) + self.values_dict = {v: k for k, v in description.options_dict.items()} self._async_update_attrs() diff --git a/homeassistant/components/airzone/strings.json b/homeassistant/components/airzone/strings.json index 438304d7f41..cd313b821aa 100644 --- a/homeassistant/components/airzone/strings.json +++ b/homeassistant/components/airzone/strings.json @@ -52,6 +52,17 @@ "40deg": "[%key:component::airzone::entity::select::grille_angles::state::40deg%]" } }, + "modes": { + "name": "Mode", + "state": { + "cool": "[%key:component::climate::entity_component::_::state::cool%]", + "dry": "[%key:component::climate::entity_component::_::state::dry%]", + "fan": "[%key:component::climate::entity_component::_::state::fan_only%]", + "heat": "[%key:component::climate::entity_component::_::state::heat%]", + "heat_cool": "[%key:component::climate::entity_component::_::state::heat_cool%]", + "stop": "Stop" + } + }, "sleep_times": { "name": "Sleep", "state": { diff --git a/homeassistant/components/airzone_cloud/binary_sensor.py b/homeassistant/components/airzone_cloud/binary_sensor.py index f22515155f1..3d6f6b42901 100644 --- a/homeassistant/components/airzone_cloud/binary_sensor.py +++ b/homeassistant/components/airzone_cloud/binary_sensor.py @@ -161,6 +161,11 @@ class AirzoneBinarySensor(AirzoneEntity, BinarySensorEntity): entity_description: AirzoneBinarySensorEntityDescription + @property + def available(self) -> bool: + """Return Airzone Cloud binary sensor availability.""" + return super().available and self.is_on is not None + @callback def _handle_coordinator_update(self) -> None: """Update attributes when the coordinator updates.""" diff --git a/homeassistant/components/airzone_cloud/sensor.py b/homeassistant/components/airzone_cloud/sensor.py index a3a456edd03..9f0ee01aca2 100644 --- a/homeassistant/components/airzone_cloud/sensor.py +++ b/homeassistant/components/airzone_cloud/sensor.py @@ -189,6 +189,11 @@ async def async_setup_entry( class AirzoneSensor(AirzoneEntity, SensorEntity): """Define an Airzone Cloud sensor.""" + @property + def available(self) -> bool: + """Return Airzone Cloud sensor availability.""" + return super().available and self.native_value is not None + @callback def _handle_coordinator_update(self) -> None: """Update attributes when the coordinator updates.""" diff --git a/homeassistant/components/alarm_control_panel/icons.json b/homeassistant/components/alarm_control_panel/icons.json index 
915448a9962..0295699bae9 100644 --- a/homeassistant/components/alarm_control_panel/icons.json +++ b/homeassistant/components/alarm_control_panel/icons.json @@ -15,12 +15,26 @@ } }, "services": { - "alarm_arm_away": "mdi:shield-lock", - "alarm_arm_home": "mdi:shield-home", - "alarm_arm_night": "mdi:shield-moon", - "alarm_arm_custom_bypass": "mdi:security", - "alarm_disarm": "mdi:shield-off", - "alarm_trigger": "mdi:bell-ring", - "alarm_arm_vacation": "mdi:shield-airplane" + "alarm_arm_away": { + "service": "mdi:shield-lock" + }, + "alarm_arm_home": { + "service": "mdi:shield-home" + }, + "alarm_arm_night": { + "service": "mdi:shield-moon" + }, + "alarm_arm_custom_bypass": { + "service": "mdi:security" + }, + "alarm_disarm": { + "service": "mdi:shield-off" + }, + "alarm_trigger": { + "service": "mdi:bell-ring" + }, + "alarm_arm_vacation": { + "service": "mdi:shield-airplane" + } } } diff --git a/homeassistant/components/alarmdecoder/icons.json b/homeassistant/components/alarmdecoder/icons.json index 80835a049c8..ccb89749d2d 100644 --- a/homeassistant/components/alarmdecoder/icons.json +++ b/homeassistant/components/alarmdecoder/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "alarm_keypress": "mdi:dialpad", - "alarm_toggle_chime": "mdi:abc" + "alarm_keypress": { + "service": "mdi:dialpad" + }, + "alarm_toggle_chime": { + "service": "mdi:abc" + } } } diff --git a/homeassistant/components/alert/__init__.py b/homeassistant/components/alert/__init__.py index 1ffeb7c73ac..c49e14f2c6f 100644 --- a/homeassistant/components/alert/__init__.py +++ b/homeassistant/components/alert/__init__.py @@ -124,9 +124,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if not entities: return False - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") await component.async_add_entities(entities) @@ -162,16 +162,8 @@ class Alert(Entity): self._data = data self._message_template = message_template - if self._message_template is not None: - self._message_template.hass = hass - self._done_message_template = done_message_template - if self._done_message_template is not None: - self._done_message_template.hass = hass - self._title_template = title_template - if self._title_template is not None: - self._title_template.hass = hass self._notifiers = notifiers self._can_ack = can_ack diff --git a/homeassistant/components/alert/icons.json b/homeassistant/components/alert/icons.json index 7f5258706d2..5d8613ec592 100644 --- a/homeassistant/components/alert/icons.json +++ b/homeassistant/components/alert/icons.json @@ -1,7 +1,13 @@ { "services": { - "toggle": "mdi:bell-ring", - "turn_off": "mdi:bell-off", - "turn_on": "mdi:bell-alert" + "toggle": { + "service": "mdi:bell-ring" + }, + "turn_off": { + "service": "mdi:bell-off" + }, + "turn_on": { + "service": "mdi:bell-alert" + } } } diff --git a/homeassistant/components/alexa/flash_briefings.py b/homeassistant/components/alexa/flash_briefings.py index eed700602ce..0d75ee04b7a 100644 --- a/homeassistant/components/alexa/flash_briefings.py +++ b/homeassistant/components/alexa/flash_briefings.py @@ -52,7 +52,6 @@ class 
AlexaFlashBriefingView(http.HomeAssistantView): """Initialize Alexa view.""" super().__init__() self.flash_briefings = flash_briefings - template.attach(hass, self.flash_briefings) @callback def get( diff --git a/homeassistant/components/alexa/handlers.py b/homeassistant/components/alexa/handlers.py index 53bf6702138..3571f436ff6 100644 --- a/homeassistant/components/alexa/handlers.py +++ b/homeassistant/components/alexa/handlers.py @@ -1206,7 +1206,7 @@ async def async_api_set_mode( raise AlexaInvalidValueError(msg) # Remote Activity - if instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}": + elif instance == f"{remote.DOMAIN}.{remote.ATTR_ACTIVITY}": activity = mode.split(".")[1] activities: list[str] | None = entity.attributes.get(remote.ATTR_ACTIVITY_LIST) if activity != PRESET_MODE_NA and activities and activity in activities: diff --git a/homeassistant/components/alexa/resources.py b/homeassistant/components/alexa/resources.py index 7782716798a..4541801d31f 100644 --- a/homeassistant/components/alexa/resources.py +++ b/homeassistant/components/alexa/resources.py @@ -283,7 +283,7 @@ class AlexaPresetResource(AlexaCapabilityResource): """Implements Alexa PresetResources. Use presetResources with RangeController to provide a set of - friendlyNamesfor each RangeController preset. + friendlyNames for each RangeController preset. https://developer.amazon.com/docs/device-apis/resources-and-assets.html#presetresources """ diff --git a/homeassistant/components/alexa/smart_home.py b/homeassistant/components/alexa/smart_home.py index 57c1ba791ba..d7bcfa5698e 100644 --- a/homeassistant/components/alexa/smart_home.py +++ b/homeassistant/components/alexa/smart_home.py @@ -194,7 +194,7 @@ async def async_handle_message( try: if not enabled: - raise AlexaBridgeUnreachableError( + raise AlexaBridgeUnreachableError( # noqa: TRY301 "Alexa API not enabled in Home Assistant configuration" ) diff --git a/homeassistant/components/amazon_polly/const.py b/homeassistant/components/amazon_polly/const.py index bb196544fc3..40b1bba3ddd 100644 --- a/homeassistant/components/amazon_polly/const.py +++ b/homeassistant/components/amazon_polly/const.py @@ -8,128 +8,23 @@ CONF_REGION: Final = "region_name" CONF_ACCESS_KEY_ID: Final = "aws_access_key_id" CONF_SECRET_ACCESS_KEY: Final = "aws_secret_access_key" -DEFAULT_REGION: Final = "us-east-1" -SUPPORTED_REGIONS: Final[list[str]] = [ - "us-east-1", - "us-east-2", - "us-west-1", - "us-west-2", - "ca-central-1", - "eu-west-1", - "eu-central-1", - "eu-west-2", - "eu-west-3", - "ap-southeast-1", - "ap-southeast-2", - "ap-northeast-2", - "ap-northeast-1", - "ap-south-1", - "sa-east-1", -] - CONF_ENGINE: Final = "engine" CONF_VOICE: Final = "voice" CONF_OUTPUT_FORMAT: Final = "output_format" CONF_SAMPLE_RATE: Final = "sample_rate" CONF_TEXT_TYPE: Final = "text_type" -SUPPORTED_VOICES: Final[list[str]] = [ - "Aditi", # Hindi - "Amy", # English (British) - "Aria", # English (New Zealand), Neural - "Arlet", # Catalan, Neural - "Arthur", # English, Neural - "Astrid", # Swedish - "Ayanda", # English (South African), Neural - "Bianca", # Italian - "Brian", # English (British) - "Camila", # Portuguese, Brazilian - "Carla", # Italian - "Carmen", # Romanian - "Celine", # French - "Chantal", # French Canadian - "Conchita", # Spanish (European) - "Cristiano", # Portuguese (European) - "Daniel", # German, Neural - "Dora", # Icelandic - "Elin", # Swedish, Neural - "Emma", # English - "Enrique", # Spanish (European) - "Ewa", # Polish - "Filiz", # Turkish - "Gabrielle", # French 
(Canadian) - "Geraint", # English Welsh - "Giorgio", # Italian - "Gwyneth", # Welsh - "Hala", # Arabic (Gulf), Neural - "Hannah", # German (Austrian), Neural - "Hans", # German - "Hiujin", # Chinese (Cantonese), Neural - "Ida", # Norwegian, Neural - "Ines", # Portuguese, European # codespell:ignore ines - "Ivy", # English - "Jacek", # Polish - "Jan", # Polish - "Joanna", # English - "Joey", # English - "Justin", # English - "Kajal", # English (Indian)/Hindi (Bilingual ), Neural - "Karl", # Icelandic - "Kendra", # English - "Kevin", # English, Neural - "Kimberly", # English - "Laura", # Dutch, Neural - "Lea", # French - "Liam", # Canadian French, Neural - "Liv", # Norwegian - "Lotte", # Dutch - "Lucia", # Spanish European - "Lupe", # Spanish US - "Mads", # Danish - "Maja", # Polish - "Marlene", # German - "Mathieu", # French - "Matthew", # English - "Maxim", # Russian - "Mia", # Spanish Mexican - "Miguel", # Spanish US - "Mizuki", # Japanese - "Naja", # Danish - "Nicole", # English Australian - "Ola", # Polish, Neural - "Olivia", # Female, Australian, Neural - "Penelope", # Spanish US - "Pedro", # Spanish US, Neural - "Raveena", # English, Indian - "Ricardo", # Portuguese (Brazilian) - "Ruben", # Dutch - "Russell", # English (Australian) - "Ruth", # English, Neural - "Salli", # English - "Seoyeon", # Korean - "Stephen", # English, Neural - "Suvi", # Finnish - "Takumi", # Japanese - "Tatyana", # Russian - "Vicki", # German - "Vitoria", # Portuguese, Brazilian - "Zeina", # Arabic - "Zhiyu", # Chinese -] +SUPPORTED_OUTPUT_FORMATS: Final[set[str]] = {"mp3", "ogg_vorbis", "pcm"} -SUPPORTED_OUTPUT_FORMATS: Final[list[str]] = ["mp3", "ogg_vorbis", "pcm"] +SUPPORTED_SAMPLE_RATES: Final[set[str]] = {"8000", "16000", "22050", "24000"} -SUPPORTED_ENGINES: Final[list[str]] = ["neural", "standard"] - -SUPPORTED_SAMPLE_RATES: Final[list[str]] = ["8000", "16000", "22050", "24000"] - -SUPPORTED_SAMPLE_RATES_MAP: Final[dict[str, list[str]]] = { - "mp3": ["8000", "16000", "22050", "24000"], - "ogg_vorbis": ["8000", "16000", "22050"], - "pcm": ["8000", "16000"], +SUPPORTED_SAMPLE_RATES_MAP: Final[dict[str, set[str]]] = { + "mp3": {"8000", "16000", "22050", "24000"}, + "ogg_vorbis": {"8000", "16000", "22050"}, + "pcm": {"8000", "16000"}, } -SUPPORTED_TEXT_TYPES: Final[list[str]] = ["text", "ssml"] +SUPPORTED_TEXT_TYPES: Final[set[str]] = {"text", "ssml"} CONTENT_TYPE_EXTENSIONS: Final[dict[str, str]] = { "audio/mpeg": "mp3", @@ -137,6 +32,8 @@ CONTENT_TYPE_EXTENSIONS: Final[dict[str, str]] = { "audio/pcm": "pcm", } +DEFAULT_REGION: Final = "us-east-1" + DEFAULT_ENGINE: Final = "standard" DEFAULT_VOICE: Final = "Joanna" DEFAULT_OUTPUT_FORMAT: Final = "mp3" diff --git a/homeassistant/components/amazon_polly/tts.py b/homeassistant/components/amazon_polly/tts.py index d5cb7092fe3..1fc972fa3a1 100644 --- a/homeassistant/components/amazon_polly/tts.py +++ b/homeassistant/components/amazon_polly/tts.py @@ -16,6 +16,11 @@ from homeassistant.components.tts import ( ) from homeassistant.const import ATTR_CREDENTIALS, CONF_PROFILE_NAME from homeassistant.core import HomeAssistant +from homeassistant.generated.amazon_polly import ( + SUPPORTED_ENGINES, + SUPPORTED_REGIONS, + SUPPORTED_VOICES, +) import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -38,13 +43,10 @@ from .const import ( DEFAULT_SAMPLE_RATES, DEFAULT_TEXT_TYPE, DEFAULT_VOICE, - SUPPORTED_ENGINES, SUPPORTED_OUTPUT_FORMATS, - SUPPORTED_REGIONS, SUPPORTED_SAMPLE_RATES, 
SUPPORTED_SAMPLE_RATES_MAP, SUPPORTED_TEXT_TYPES, - SUPPORTED_VOICES, ) _LOGGER: Final = logging.getLogger(__name__) diff --git a/homeassistant/components/ambient_network/manifest.json b/homeassistant/components/ambient_network/manifest.json index 553adb240b0..4800ffcb29d 100644 --- a/homeassistant/components/ambient_network/manifest.json +++ b/homeassistant/components/ambient_network/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["aioambient"], - "requirements": ["aioambient==2024.01.0"] + "requirements": ["aioambient==2024.08.0"] } diff --git a/homeassistant/components/ambient_station/__init__.py b/homeassistant/components/ambient_station/__init__.py index d0b04e53e67..469ad7e6e06 100644 --- a/homeassistant/components/ambient_station/__init__.py +++ b/homeassistant/components/ambient_station/__init__.py @@ -17,7 +17,6 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv import homeassistant.helpers.device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_send import homeassistant.helpers.entity_registry as er @@ -25,7 +24,6 @@ import homeassistant.helpers.entity_registry as er from .const import ( ATTR_LAST_DATA, CONF_APP_KEY, - DOMAIN, LOGGER, TYPE_SOLARRADIATION, TYPE_SOLARRADIATION_LX, @@ -37,7 +35,6 @@ DATA_CONFIG = "config" DEFAULT_SOCKET_MIN_RETRY = 15 -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) type AmbientStationConfigEntry = ConfigEntry[AmbientStation] diff --git a/homeassistant/components/ambient_station/config_flow.py b/homeassistant/components/ambient_station/config_flow.py index 66e603ba2ff..072ca68b865 100644 --- a/homeassistant/components/ambient_station/config_flow.py +++ b/homeassistant/components/ambient_station/config_flow.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import Any + from aioambient import API from aioambient.errors import AmbientError import voluptuous as vol @@ -32,7 +34,9 @@ class AmbientStationFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors if errors else {}, ) - async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult: + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" if not user_input: return await self._show_form() diff --git a/homeassistant/components/ambient_station/manifest.json b/homeassistant/components/ambient_station/manifest.json index 046ab9f73e9..a14de5f37c5 100644 --- a/homeassistant/components/ambient_station/manifest.json +++ b/homeassistant/components/ambient_station/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_push", "loggers": ["aioambient"], - "requirements": ["aioambient==2024.01.0"] + "requirements": ["aioambient==2024.08.0"] } diff --git a/homeassistant/components/amcrest/camera.py b/homeassistant/components/amcrest/camera.py index b9b2701eac6..0bf02b604f1 100644 --- a/homeassistant/components/amcrest/camera.py +++ b/homeassistant/components/amcrest/camera.py @@ -499,7 +499,7 @@ class AmcrestCam(Camera): await getattr(self, f"_async_set_{func}")(value) new_value = await getattr(self, f"_async_get_{func}")() if new_value != value: - raise AmcrestCommandFailed + raise AmcrestCommandFailed # noqa: TRY301 except (AmcrestError, AmcrestCommandFailed) as error: if tries == 1: 
log_update_error(_LOGGER, action, self.name, description, error) diff --git a/homeassistant/components/amcrest/icons.json b/homeassistant/components/amcrest/icons.json index efba49d6b56..e284bc15259 100644 --- a/homeassistant/components/amcrest/icons.json +++ b/homeassistant/components/amcrest/icons.json @@ -1,15 +1,37 @@ { "services": { - "enable_recording": "mdi:record-rec", - "disable_recording": "mdi:stop", - "enable_audio": "mdi:volume-high", - "disable_audio": "mdi:volume-off", - "enable_motion_recording": "mdi:motion-sensor", - "disable_motion_recording": "mdi:motion-sensor-off", - "goto_preset": "mdi:pan", - "set_color_bw": "mdi:palette", - "start_tour": "mdi:panorama", - "stop_tour": "mdi:panorama-outline", - "ptz_control": "mdi:pan" + "enable_recording": { + "service": "mdi:record-rec" + }, + "disable_recording": { + "service": "mdi:stop" + }, + "enable_audio": { + "service": "mdi:volume-high" + }, + "disable_audio": { + "service": "mdi:volume-off" + }, + "enable_motion_recording": { + "service": "mdi:motion-sensor" + }, + "disable_motion_recording": { + "service": "mdi:motion-sensor-off" + }, + "goto_preset": { + "service": "mdi:pan" + }, + "set_color_bw": { + "service": "mdi:palette" + }, + "start_tour": { + "service": "mdi:panorama" + }, + "stop_tour": { + "service": "mdi:panorama-outline" + }, + "ptz_control": { + "service": "mdi:pan" + } } } diff --git a/homeassistant/components/android_ip_webcam/__init__.py b/homeassistant/components/android_ip_webcam/__init__.py index db50d6d3e1a..3772fe4642b 100644 --- a/homeassistant/components/android_ip_webcam/__init__.py +++ b/homeassistant/components/android_ip_webcam/__init__.py @@ -14,7 +14,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv from .const import DOMAIN from .coordinator import AndroidIPCamDataUpdateCoordinator @@ -27,9 +26,6 @@ PLATFORMS: list[Platform] = [ ] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Android IP Webcam from a config entry.""" websession = async_get_clientsession(hass) diff --git a/homeassistant/components/androidtv/icons.json b/homeassistant/components/androidtv/icons.json index 0127d60a72e..d7c646dfdfc 100644 --- a/homeassistant/components/androidtv/icons.json +++ b/homeassistant/components/androidtv/icons.json @@ -1,8 +1,16 @@ { "services": { - "adb_command": "mdi:console", - "download": "mdi:download", - "upload": "mdi:upload", - "learn_sendevent": "mdi:remote" + "adb_command": { + "service": "mdi:console" + }, + "download": { + "service": "mdi:download" + }, + "upload": { + "service": "mdi:upload" + }, + "learn_sendevent": { + "service": "mdi:remote" + } } } diff --git a/homeassistant/components/androidtv/media_player.py b/homeassistant/components/androidtv/media_player.py index 884b5f60f57..75cf6ead6c3 100644 --- a/homeassistant/components/androidtv/media_player.py +++ b/homeassistant/components/androidtv/media_player.py @@ -87,7 +87,7 @@ async def async_setup_entry( "adb_command", ) platform.async_register_entity_service( - SERVICE_LEARN_SENDEVENT, {}, "learn_sendevent" + SERVICE_LEARN_SENDEVENT, None, "learn_sendevent" ) platform.async_register_entity_service( SERVICE_DOWNLOAD, diff --git a/homeassistant/components/androidtv_remote/manifest.json b/homeassistant/components/androidtv_remote/manifest.json index 
e24fcc5d653..a06152fa570 100644 --- a/homeassistant/components/androidtv_remote/manifest.json +++ b/homeassistant/components/androidtv_remote/manifest.json @@ -8,6 +8,6 @@ "iot_class": "local_push", "loggers": ["androidtvremote2"], "quality_scale": "platinum", - "requirements": ["androidtvremote2==0.1.1"], + "requirements": ["androidtvremote2==0.1.2"], "zeroconf": ["_androidtvremote2._tcp.local."] } diff --git a/homeassistant/components/anthropic/__init__.py b/homeassistant/components/anthropic/__init__.py new file mode 100644 index 00000000000..aa6cf509fa1 --- /dev/null +++ b/homeassistant/components/anthropic/__init__.py @@ -0,0 +1,46 @@ +"""The Anthropic integration.""" + +from __future__ import annotations + +import anthropic + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_API_KEY, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryNotReady +from homeassistant.helpers import config_validation as cv + +from .const import DOMAIN, LOGGER + +PLATFORMS = (Platform.CONVERSATION,) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + +type AnthropicConfigEntry = ConfigEntry[anthropic.AsyncClient] + + +async def async_setup_entry(hass: HomeAssistant, entry: AnthropicConfigEntry) -> bool: + """Set up Anthropic from a config entry.""" + client = anthropic.AsyncAnthropic(api_key=entry.data[CONF_API_KEY]) + try: + await client.messages.create( + model="claude-3-haiku-20240307", + max_tokens=1, + messages=[{"role": "user", "content": "Hi"}], + timeout=10.0, + ) + except anthropic.AuthenticationError as err: + LOGGER.error("Invalid API key: %s", err) + return False + except anthropic.AnthropicError as err: + raise ConfigEntryNotReady(err) from err + + entry.runtime_data = client + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Unload Anthropic.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/anthropic/config_flow.py b/homeassistant/components/anthropic/config_flow.py new file mode 100644 index 00000000000..01e16ec5350 --- /dev/null +++ b/homeassistant/components/anthropic/config_flow.py @@ -0,0 +1,210 @@ +"""Config flow for Anthropic integration.""" + +from __future__ import annotations + +import logging +from types import MappingProxyType +from typing import Any + +import anthropic +import voluptuous as vol + +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) +from homeassistant.const import CONF_API_KEY, CONF_LLM_HASS_API +from homeassistant.core import HomeAssistant +from homeassistant.helpers import llm +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + SelectOptionDict, + SelectSelector, + SelectSelectorConfig, + TemplateSelector, +) + +from .const import ( + CONF_CHAT_MODEL, + CONF_MAX_TOKENS, + CONF_PROMPT, + CONF_RECOMMENDED, + CONF_TEMPERATURE, + DOMAIN, + RECOMMENDED_CHAT_MODEL, + RECOMMENDED_MAX_TOKENS, + RECOMMENDED_TEMPERATURE, +) + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_API_KEY): str, + } +) + +RECOMMENDED_OPTIONS = { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: llm.LLM_API_ASSIST, + CONF_PROMPT: llm.DEFAULT_INSTRUCTIONS_PROMPT, +} + + +async def validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None: + 
"""Validate the user input allows us to connect. + + Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. + """ + client = anthropic.AsyncAnthropic(api_key=data[CONF_API_KEY]) + await client.messages.create( + model="claude-3-haiku-20240307", + max_tokens=1, + messages=[{"role": "user", "content": "Hi"}], + timeout=10.0, + ) + + +class AnthropicConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Anthropic.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors = {} + + if user_input is not None: + try: + await validate_input(self.hass, user_input) + except anthropic.APITimeoutError: + errors["base"] = "timeout_connect" + except anthropic.APIConnectionError: + errors["base"] = "cannot_connect" + except anthropic.APIStatusError as e: + if isinstance(e.body, dict): + errors["base"] = e.body.get("error", {}).get("type", "unknown") + else: + errors["base"] = "unknown" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title="Claude", + data=user_input, + options=RECOMMENDED_OPTIONS, + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors or None + ) + + @staticmethod + def async_get_options_flow( + config_entry: ConfigEntry, + ) -> OptionsFlow: + """Create the options flow.""" + return AnthropicOptionsFlow(config_entry) + + +class AnthropicOptionsFlow(OptionsFlow): + """Anthropic config flow options handler.""" + + def __init__(self, config_entry: ConfigEntry) -> None: + """Initialize options flow.""" + self.config_entry = config_entry + self.last_rendered_recommended = config_entry.options.get( + CONF_RECOMMENDED, False + ) + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options.""" + options: dict[str, Any] | MappingProxyType[str, Any] = self.config_entry.options + + if user_input is not None: + if user_input[CONF_RECOMMENDED] == self.last_rendered_recommended: + if user_input[CONF_LLM_HASS_API] == "none": + user_input.pop(CONF_LLM_HASS_API) + return self.async_create_entry(title="", data=user_input) + + # Re-render the options again, now with the recommended options shown/hidden + self.last_rendered_recommended = user_input[CONF_RECOMMENDED] + + options = { + CONF_RECOMMENDED: user_input[CONF_RECOMMENDED], + CONF_PROMPT: user_input[CONF_PROMPT], + CONF_LLM_HASS_API: user_input[CONF_LLM_HASS_API], + } + + suggested_values = options.copy() + if not suggested_values.get(CONF_PROMPT): + suggested_values[CONF_PROMPT] = llm.DEFAULT_INSTRUCTIONS_PROMPT + + schema = self.add_suggested_values_to_schema( + vol.Schema(anthropic_config_option_schema(self.hass, options)), + suggested_values, + ) + + return self.async_show_form( + step_id="init", + data_schema=schema, + ) + + +def anthropic_config_option_schema( + hass: HomeAssistant, + options: dict[str, Any] | MappingProxyType[str, Any], +) -> dict: + """Return a schema for Anthropic completion options.""" + hass_apis: list[SelectOptionDict] = [ + SelectOptionDict( + label="No control", + value="none", + ) + ] + hass_apis.extend( + SelectOptionDict( + label=api.name, + value=api.id, + ) + for api in llm.async_get_apis(hass) + ) + + schema = { + vol.Optional(CONF_PROMPT): TemplateSelector(), + vol.Optional(CONF_LLM_HASS_API, default="none"): SelectSelector( + SelectSelectorConfig(options=hass_apis) + 
), + vol.Required( + CONF_RECOMMENDED, default=options.get(CONF_RECOMMENDED, False) + ): bool, + } + + if options.get(CONF_RECOMMENDED): + return schema + + schema.update( + { + vol.Optional( + CONF_CHAT_MODEL, + default=RECOMMENDED_CHAT_MODEL, + ): str, + vol.Optional( + CONF_MAX_TOKENS, + default=RECOMMENDED_MAX_TOKENS, + ): int, + vol.Optional( + CONF_TEMPERATURE, + default=RECOMMENDED_TEMPERATURE, + ): NumberSelector(NumberSelectorConfig(min=0, max=1, step=0.05)), + } + ) + return schema diff --git a/homeassistant/components/anthropic/const.py b/homeassistant/components/anthropic/const.py new file mode 100644 index 00000000000..0dbf9c51ac1 --- /dev/null +++ b/homeassistant/components/anthropic/const.py @@ -0,0 +1,15 @@ +"""Constants for the Anthropic integration.""" + +import logging + +DOMAIN = "anthropic" +LOGGER = logging.getLogger(__package__) + +CONF_RECOMMENDED = "recommended" +CONF_PROMPT = "prompt" +CONF_CHAT_MODEL = "chat_model" +RECOMMENDED_CHAT_MODEL = "claude-3-haiku-20240307" +CONF_MAX_TOKENS = "max_tokens" +RECOMMENDED_MAX_TOKENS = 1024 +CONF_TEMPERATURE = "temperature" +RECOMMENDED_TEMPERATURE = 1.0 diff --git a/homeassistant/components/anthropic/conversation.py b/homeassistant/components/anthropic/conversation.py new file mode 100644 index 00000000000..20e555e9592 --- /dev/null +++ b/homeassistant/components/anthropic/conversation.py @@ -0,0 +1,316 @@ +"""Conversation support for Anthropic.""" + +from collections.abc import Callable +import json +from typing import Any, Literal, cast + +import anthropic +from anthropic._types import NOT_GIVEN +from anthropic.types import ( + Message, + MessageParam, + TextBlock, + TextBlockParam, + ToolParam, + ToolResultBlockParam, + ToolUseBlock, + ToolUseBlockParam, +) +import voluptuous as vol +from voluptuous_openapi import convert + +from homeassistant.components import conversation +from homeassistant.components.conversation import trace +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_LLM_HASS_API, MATCH_ALL +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, TemplateError +from homeassistant.helpers import device_registry as dr, intent, llm, template +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.util import ulid + +from . 
import AnthropicConfigEntry +from .const import ( + CONF_CHAT_MODEL, + CONF_MAX_TOKENS, + CONF_PROMPT, + CONF_TEMPERATURE, + DOMAIN, + LOGGER, + RECOMMENDED_CHAT_MODEL, + RECOMMENDED_MAX_TOKENS, + RECOMMENDED_TEMPERATURE, +) + +# Max number of back and forth with the LLM to generate a response +MAX_TOOL_ITERATIONS = 10 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: AnthropicConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up conversation entities.""" + agent = AnthropicConversationEntity(config_entry) + async_add_entities([agent]) + + +def _format_tool( + tool: llm.Tool, custom_serializer: Callable[[Any], Any] | None +) -> ToolParam: + """Format tool specification.""" + return ToolParam( + name=tool.name, + description=tool.description or "", + input_schema=convert(tool.parameters, custom_serializer=custom_serializer), + ) + + +def _message_convert( + message: Message, +) -> MessageParam: + """Convert from class to TypedDict.""" + param_content: list[TextBlockParam | ToolUseBlockParam] = [] + + for message_content in message.content: + if isinstance(message_content, TextBlock): + param_content.append(TextBlockParam(type="text", text=message_content.text)) + elif isinstance(message_content, ToolUseBlock): + param_content.append( + ToolUseBlockParam( + type="tool_use", + id=message_content.id, + name=message_content.name, + input=message_content.input, + ) + ) + + return MessageParam(role=message.role, content=param_content) + + +class AnthropicConversationEntity( + conversation.ConversationEntity, conversation.AbstractConversationAgent +): + """Anthropic conversation agent.""" + + _attr_has_entity_name = True + _attr_name = None + + def __init__(self, entry: AnthropicConfigEntry) -> None: + """Initialize the agent.""" + self.entry = entry + self.history: dict[str, list[MessageParam]] = {} + self._attr_unique_id = entry.entry_id + self._attr_device_info = dr.DeviceInfo( + identifiers={(DOMAIN, entry.entry_id)}, + manufacturer="Anthropic", + model="Claude", + entry_type=dr.DeviceEntryType.SERVICE, + ) + if self.entry.options.get(CONF_LLM_HASS_API): + self._attr_supported_features = ( + conversation.ConversationEntityFeature.CONTROL + ) + + @property + def supported_languages(self) -> list[str] | Literal["*"]: + """Return a list of supported languages.""" + return MATCH_ALL + + async def async_added_to_hass(self) -> None: + """When entity is added to Home Assistant.""" + await super().async_added_to_hass() + self.entry.async_on_unload( + self.entry.add_update_listener(self._async_entry_update_listener) + ) + + async def async_process( + self, user_input: conversation.ConversationInput + ) -> conversation.ConversationResult: + """Process a sentence.""" + options = self.entry.options + intent_response = intent.IntentResponse(language=user_input.language) + llm_api: llm.APIInstance | None = None + tools: list[ToolParam] | None = None + user_name: str | None = None + llm_context = llm.LLMContext( + platform=DOMAIN, + context=user_input.context, + user_prompt=user_input.text, + language=user_input.language, + assistant=conversation.DOMAIN, + device_id=user_input.device_id, + ) + + if options.get(CONF_LLM_HASS_API): + try: + llm_api = await llm.async_get_api( + self.hass, + options[CONF_LLM_HASS_API], + llm_context, + ) + except HomeAssistantError as err: + LOGGER.error("Error getting LLM API: %s", err) + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + f"Error preparing LLM API: {err}", + ) + return 
conversation.ConversationResult( + response=intent_response, conversation_id=user_input.conversation_id + ) + tools = [ + _format_tool(tool, llm_api.custom_serializer) for tool in llm_api.tools + ] + + if user_input.conversation_id is None: + conversation_id = ulid.ulid_now() + messages = [] + + elif user_input.conversation_id in self.history: + conversation_id = user_input.conversation_id + messages = self.history[conversation_id] + + else: + # Conversation IDs are ULIDs. We generate a new one if not provided. + # If an old OLID is passed in, we will generate a new one to indicate + # a new conversation was started. If the user picks their own, they + # want to track a conversation and we respect it. + try: + ulid.ulid_to_bytes(user_input.conversation_id) + conversation_id = ulid.ulid_now() + except ValueError: + conversation_id = user_input.conversation_id + + messages = [] + + if ( + user_input.context + and user_input.context.user_id + and ( + user := await self.hass.auth.async_get_user(user_input.context.user_id) + ) + ): + user_name = user.name + + try: + prompt_parts = [ + template.Template( + llm.BASE_PROMPT + + options.get(CONF_PROMPT, llm.DEFAULT_INSTRUCTIONS_PROMPT), + self.hass, + ).async_render( + { + "ha_name": self.hass.config.location_name, + "user_name": user_name, + "llm_context": llm_context, + }, + parse_result=False, + ) + ] + + except TemplateError as err: + LOGGER.error("Error rendering prompt: %s", err) + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + f"Sorry, I had a problem with my template: {err}", + ) + return conversation.ConversationResult( + response=intent_response, conversation_id=conversation_id + ) + + if llm_api: + prompt_parts.append(llm_api.api_prompt) + + prompt = "\n".join(prompt_parts) + + # Create a copy of the variable because we attach it to the trace + messages = [*messages, MessageParam(role="user", content=user_input.text)] + + LOGGER.debug("Prompt: %s", messages) + LOGGER.debug("Tools: %s", tools) + trace.async_conversation_trace_append( + trace.ConversationTraceEventType.AGENT_DETAIL, + {"system": prompt, "messages": messages}, + ) + + client = self.entry.runtime_data + + # To prevent infinite loops, we limit the number of iterations + for _iteration in range(MAX_TOOL_ITERATIONS): + try: + response = await client.messages.create( + model=options.get(CONF_CHAT_MODEL, RECOMMENDED_CHAT_MODEL), + messages=messages, + tools=tools or NOT_GIVEN, + max_tokens=options.get(CONF_MAX_TOKENS, RECOMMENDED_MAX_TOKENS), + system=prompt, + temperature=options.get(CONF_TEMPERATURE, RECOMMENDED_TEMPERATURE), + ) + except anthropic.AnthropicError as err: + intent_response.async_set_error( + intent.IntentResponseErrorCode.UNKNOWN, + f"Sorry, I had a problem talking to Anthropic: {err}", + ) + return conversation.ConversationResult( + response=intent_response, conversation_id=conversation_id + ) + + LOGGER.debug("Response %s", response) + + messages.append(_message_convert(response)) + + if response.stop_reason != "tool_use" or not llm_api: + break + + tool_results: list[ToolResultBlockParam] = [] + for tool_call in response.content: + if isinstance(tool_call, TextBlock): + LOGGER.info(tool_call.text) + + if not isinstance(tool_call, ToolUseBlock): + continue + + tool_input = llm.ToolInput( + tool_name=tool_call.name, + tool_args=cast(dict[str, Any], tool_call.input), + ) + LOGGER.debug( + "Tool call: %s(%s)", tool_input.tool_name, tool_input.tool_args + ) + + try: + tool_response = await llm_api.async_call_tool(tool_input) + except 
(HomeAssistantError, vol.Invalid) as e: + tool_response = {"error": type(e).__name__} + if str(e): + tool_response["error_text"] = str(e) + + LOGGER.debug("Tool response: %s", tool_response) + tool_results.append( + ToolResultBlockParam( + type="tool_result", + tool_use_id=tool_call.id, + content=json.dumps(tool_response), + ) + ) + + messages.append(MessageParam(role="user", content=tool_results)) + + self.history[conversation_id] = messages + + for content in response.content: + if isinstance(content, TextBlock): + intent_response.async_set_speech(content.text) + break + + return conversation.ConversationResult( + response=intent_response, conversation_id=conversation_id + ) + + async def _async_entry_update_listener( + self, hass: HomeAssistant, entry: ConfigEntry + ) -> None: + """Handle options update.""" + # Reload as we update device info + entity name + supported features + await hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/anthropic/manifest.json b/homeassistant/components/anthropic/manifest.json new file mode 100644 index 00000000000..7d51c458e4d --- /dev/null +++ b/homeassistant/components/anthropic/manifest.json @@ -0,0 +1,12 @@ +{ + "domain": "anthropic", + "name": "Anthropic Conversation", + "after_dependencies": ["assist_pipeline", "intent"], + "codeowners": ["@Shulyaka"], + "config_flow": true, + "dependencies": ["conversation"], + "documentation": "https://www.home-assistant.io/integrations/anthropic", + "integration_type": "service", + "iot_class": "cloud_polling", + "requirements": ["anthropic==0.31.2"] +} diff --git a/homeassistant/components/anthropic/strings.json b/homeassistant/components/anthropic/strings.json new file mode 100644 index 00000000000..9550a1a6672 --- /dev/null +++ b/homeassistant/components/anthropic/strings.json @@ -0,0 +1,34 @@ +{ + "config": { + "step": { + "user": { + "data": { + "api_key": "[%key:common::config_flow::data::api_key%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "timeout_connect": "[%key:common::config_flow::error::timeout_connect%]", + "authentication_error": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + } + }, + "options": { + "step": { + "init": { + "data": { + "prompt": "Instructions", + "chat_model": "[%key:common::generic::model%]", + "max_tokens": "Maximum tokens to return in response", + "temperature": "Temperature", + "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", + "recommended": "Recommended model settings" + }, + "data_description": { + "prompt": "Instruct how the LLM should respond. This can be a template." 
+ } + } + } + } +} diff --git a/homeassistant/components/apcupsd/__init__.py b/homeassistant/components/apcupsd/__init__.py index 73ed721158d..7293a42f7e7 100644 --- a/homeassistant/components/apcupsd/__init__.py +++ b/homeassistant/components/apcupsd/__init__.py @@ -8,7 +8,6 @@ from typing import Final from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from .const import DOMAIN from .coordinator import APCUPSdCoordinator @@ -17,8 +16,6 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS: Final = (Platform.BINARY_SENSOR, Platform.SENSOR) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Use config values to set up a function enabling status retrieval.""" diff --git a/homeassistant/components/apcupsd/const.py b/homeassistant/components/apcupsd/const.py index 56bf229579d..974c860afb8 100644 --- a/homeassistant/components/apcupsd/const.py +++ b/homeassistant/components/apcupsd/const.py @@ -6,4 +6,4 @@ DOMAIN: Final = "apcupsd" CONNECTION_TIMEOUT: int = 10 # Field name of last self test retrieved from apcupsd. -LASTSTEST: Final = "laststest" +LAST_S_TEST: Final = "laststest" diff --git a/homeassistant/components/apcupsd/sensor.py b/homeassistant/components/apcupsd/sensor.py index ff72208e9ce..d4bbfb148e5 100644 --- a/homeassistant/components/apcupsd/sensor.py +++ b/homeassistant/components/apcupsd/sensor.py @@ -13,7 +13,6 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, - STATE_UNKNOWN, UnitOfApparentPower, UnitOfElectricCurrent, UnitOfElectricPotential, @@ -26,7 +25,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN, LASTSTEST +from .const import DOMAIN, LAST_S_TEST from .coordinator import APCUPSdCoordinator PARALLEL_UPDATES = 0 @@ -157,8 +156,8 @@ SENSORS: dict[str, SensorEntityDescription] = { device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, ), - LASTSTEST: SensorEntityDescription( - key=LASTSTEST, + LAST_S_TEST: SensorEntityDescription( + key=LAST_S_TEST, translation_key="last_self_test", ), "lastxfer": SensorEntityDescription( @@ -423,7 +422,7 @@ async def async_setup_entry( # periodical (or manual) self test since last daemon restart. It might not be available # when we set up the integration, and we do not know if it would ever be available. Here we # add it anyway and mark it as unknown initially. - for resource in available_resources | {LASTSTEST}: + for resource in available_resources | {LAST_S_TEST}: if resource not in SENSORS: _LOGGER.warning("Invalid resource from APCUPSd: %s", resource.upper()) continue @@ -484,7 +483,7 @@ class APCUPSdSensor(CoordinatorEntity[APCUPSdCoordinator], SensorEntity): # performed) and may disappear again after certain event. So we mark the state as "unknown" # when it becomes unknown after such events. 
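# --- Editor's note (not part of this patch): in the apcupsd change below, setting
# --- the native value to None is the idiomatic way for a sensor to report an
# --- unknown state; assigning the STATE_UNKNOWN string constant would instead
# --- store the literal text "unknown" as if it were a measured value.
# --- Hypothetical illustration:
#
#     @property
#     def native_value(self) -> float | None:
#         return None  # rendered as "unknown", without faking a string state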
if key not in self.coordinator.data: - self._attr_native_value = STATE_UNKNOWN + self._attr_native_value = None return self._attr_native_value, inferred_unit = infer_unit(self.coordinator.data[key]) diff --git a/homeassistant/components/apple_tv/manifest.json b/homeassistant/components/apple_tv/manifest.json index 1f7ac45372e..9a053829516 100644 --- a/homeassistant/components/apple_tv/manifest.json +++ b/homeassistant/components/apple_tv/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/apple_tv", "iot_class": "local_push", "loggers": ["pyatv", "srptools"], - "requirements": ["pyatv==0.14.3"], + "requirements": ["pyatv==0.15.0"], "zeroconf": [ "_mediaremotetv._tcp.local.", "_companion-link._tcp.local.", diff --git a/homeassistant/components/apsystems/__init__.py b/homeassistant/components/apsystems/__init__.py index 91650201a87..372ce52e049 100644 --- a/homeassistant/components/apsystems/__init__.py +++ b/homeassistant/components/apsystems/__init__.py @@ -13,7 +13,12 @@ from homeassistant.core import HomeAssistant from .const import DEFAULT_PORT from .coordinator import ApSystemsDataCoordinator -PLATFORMS: list[Platform] = [Platform.NUMBER, Platform.SENSOR, Platform.SWITCH] +PLATFORMS: list[Platform] = [ + Platform.BINARY_SENSOR, + Platform.NUMBER, + Platform.SENSOR, + Platform.SWITCH, +] @dataclass diff --git a/homeassistant/components/apsystems/binary_sensor.py b/homeassistant/components/apsystems/binary_sensor.py new file mode 100644 index 00000000000..9e361ca883e --- /dev/null +++ b/homeassistant/components/apsystems/binary_sensor.py @@ -0,0 +1,102 @@ +"""The read-only binary sensors for APsystems local API integration.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from APsystemsEZ1 import ReturnAlarmInfo + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import ApSystemsConfigEntry, ApSystemsData +from .coordinator import ApSystemsDataCoordinator +from .entity import ApSystemsEntity + + +@dataclass(frozen=True, kw_only=True) +class ApsystemsLocalApiBinarySensorDescription(BinarySensorEntityDescription): + """Describes Apsystens Inverter binary sensor entity.""" + + is_on: Callable[[ReturnAlarmInfo], bool | None] + + +BINARY_SENSORS: tuple[ApsystemsLocalApiBinarySensorDescription, ...] 
= ( + ApsystemsLocalApiBinarySensorDescription( + key="off_grid_status", + translation_key="off_grid_status", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on=lambda c: c.offgrid, + ), + ApsystemsLocalApiBinarySensorDescription( + key="dc_1_short_circuit_error_status", + translation_key="dc_1_short_circuit_error_status", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on=lambda c: c.shortcircuit_1, + ), + ApsystemsLocalApiBinarySensorDescription( + key="dc_2_short_circuit_error_status", + translation_key="dc_2_short_circuit_error_status", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on=lambda c: c.shortcircuit_2, + ), + ApsystemsLocalApiBinarySensorDescription( + key="output_fault_status", + translation_key="output_fault_status", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + is_on=lambda c: not c.operating, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ApSystemsConfigEntry, + add_entities: AddEntitiesCallback, +) -> None: + """Set up the binary sensor platform.""" + config = config_entry.runtime_data + + add_entities( + ApSystemsBinarySensorWithDescription( + data=config, + entity_description=desc, + ) + for desc in BINARY_SENSORS + ) + + +class ApSystemsBinarySensorWithDescription( + CoordinatorEntity[ApSystemsDataCoordinator], ApSystemsEntity, BinarySensorEntity +): + """Base binary sensor to be used with description.""" + + entity_description: ApsystemsLocalApiBinarySensorDescription + + def __init__( + self, + data: ApSystemsData, + entity_description: ApsystemsLocalApiBinarySensorDescription, + ) -> None: + """Initialize the sensor.""" + super().__init__(data.coordinator) + ApSystemsEntity.__init__(self, data) + self.entity_description = entity_description + self._attr_unique_id = f"{data.device_id}_{entity_description.key}" + + @property + def is_on(self) -> bool | None: + """Return value of sensor.""" + return self.entity_description.is_on(self.coordinator.data.alarm_info) diff --git a/homeassistant/components/apsystems/coordinator.py b/homeassistant/components/apsystems/coordinator.py index f2d076ce3fd..6ba4f01dbc8 100644 --- a/homeassistant/components/apsystems/coordinator.py +++ b/homeassistant/components/apsystems/coordinator.py @@ -2,17 +2,26 @@ from __future__ import annotations +from dataclasses import dataclass from datetime import timedelta -from APsystemsEZ1 import APsystemsEZ1M, ReturnOutputData +from APsystemsEZ1 import APsystemsEZ1M, ReturnAlarmInfo, ReturnOutputData from homeassistant.core import HomeAssistant -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import LOGGER -class ApSystemsDataCoordinator(DataUpdateCoordinator[ReturnOutputData]): +@dataclass +class ApSystemsSensorData: + """Representing different Apsystems sensor data.""" + + output_data: ReturnOutputData + alarm_info: ReturnAlarmInfo + + +class ApSystemsDataCoordinator(DataUpdateCoordinator[ApSystemsSensorData]): """Coordinator used for all sensors.""" def __init__(self, hass: HomeAssistant, api: APsystemsEZ1M) -> None: @@ -25,5 +34,14 @@ class ApSystemsDataCoordinator(DataUpdateCoordinator[ReturnOutputData]): ) self.api = api - async def _async_update_data(self) -> ReturnOutputData: - return await self.api.get_output_data() + async def 
_async_setup(self) -> None: + try: + max_power = (await self.api.get_device_info()).maxPower + except (ConnectionError, TimeoutError): + raise UpdateFailed from None + self.api.max_power = max_power + + async def _async_update_data(self) -> ApSystemsSensorData: + output_data = await self.api.get_output_data() + alarm_info = await self.api.get_alarm_info() + return ApSystemsSensorData(output_data=output_data, alarm_info=alarm_info) diff --git a/homeassistant/components/apsystems/manifest.json b/homeassistant/components/apsystems/manifest.json index cba3e59dba0..9376d21ba28 100644 --- a/homeassistant/components/apsystems/manifest.json +++ b/homeassistant/components/apsystems/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/apsystems", "integration_type": "device", "iot_class": "local_polling", - "requirements": ["apsystems-ez1==1.3.3"] + "requirements": ["apsystems-ez1==2.2.1"] } diff --git a/homeassistant/components/apsystems/number.py b/homeassistant/components/apsystems/number.py index f9b535d7d6a..51e7130587f 100644 --- a/homeassistant/components/apsystems/number.py +++ b/homeassistant/components/apsystems/number.py @@ -26,7 +26,6 @@ async def async_setup_entry( class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): """Base sensor to be used with description.""" - _attr_native_max_value = 800 _attr_native_min_value = 30 _attr_native_step = 1 _attr_device_class = NumberDeviceClass.POWER @@ -42,6 +41,7 @@ class ApSystemsMaxOutputNumber(ApSystemsEntity, NumberEntity): super().__init__(data) self._api = data.coordinator.api self._attr_unique_id = f"{data.device_id}_output_limit" + self._attr_native_max_value = data.coordinator.api.max_power async def async_update(self) -> None: """Set the state with the value fetched from the inverter.""" diff --git a/homeassistant/components/apsystems/sensor.py b/homeassistant/components/apsystems/sensor.py index 637def4e418..afeb9d071ab 100644 --- a/homeassistant/components/apsystems/sensor.py +++ b/homeassistant/components/apsystems/sensor.py @@ -148,4 +148,4 @@ class ApSystemsSensorWithDescription( @property def native_value(self) -> StateType: """Return value of sensor.""" - return self.entity_description.value_fn(self.coordinator.data) + return self.entity_description.value_fn(self.coordinator.data.output_data) diff --git a/homeassistant/components/apsystems/strings.json b/homeassistant/components/apsystems/strings.json index 18200f7b49d..e02f86c2730 100644 --- a/homeassistant/components/apsystems/strings.json +++ b/homeassistant/components/apsystems/strings.json @@ -19,6 +19,20 @@ } }, "entity": { + "binary_sensor": { + "off_grid_status": { + "name": "Off grid status" + }, + "dc_1_short_circuit_error_status": { + "name": "DC 1 short circuit error status" + }, + "dc_2_short_circuit_error_status": { + "name": "DC 2 short circuit error status" + }, + "output_fault_status": { + "name": "Output fault status" + } + }, "sensor": { "total_power": { "name": "Total power" diff --git a/homeassistant/components/apsystems/switch.py b/homeassistant/components/apsystems/switch.py index 405adc94b27..93a21ec9f05 100644 --- a/homeassistant/components/apsystems/switch.py +++ b/homeassistant/components/apsystems/switch.py @@ -5,7 +5,6 @@ from __future__ import annotations from typing import Any from aiohttp.client_exceptions import ClientConnectionError -from APsystemsEZ1 import Status from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity from homeassistant.core import HomeAssistant @@ -45,12 +44,12 
@@ class ApSystemsInverterSwitch(ApSystemsEntity, SwitchEntity): self._attr_available = False else: self._attr_available = True - self._attr_is_on = status == Status.normal + self._attr_is_on = status async def async_turn_on(self, **kwargs: Any) -> None: """Turn the switch on.""" - await self._api.set_device_power_status(0) + await self._api.set_device_power_status(True) async def async_turn_off(self, **kwargs: Any) -> None: """Turn the switch off.""" - await self._api.set_device_power_status(1) + await self._api.set_device_power_status(False) diff --git a/homeassistant/components/aquacell/__init__.py b/homeassistant/components/aquacell/__init__.py index 98cf5d7f0f0..e44c0f00fa8 100644 --- a/homeassistant/components/aquacell/__init__.py +++ b/homeassistant/components/aquacell/__init__.py @@ -3,12 +3,14 @@ from __future__ import annotations from aioaquacell import AquacellApi +from aioaquacell.const import Brand from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession +from .const import CONF_BRAND from .coordinator import AquacellCoordinator PLATFORMS = [Platform.SENSOR] @@ -20,7 +22,9 @@ async def async_setup_entry(hass: HomeAssistant, entry: AquacellConfigEntry) -> """Set up Aquacell from a config entry.""" session = async_get_clientsession(hass) - aquacell_api = AquacellApi(session) + brand = entry.data.get(CONF_BRAND, Brand.AQUACELL) + + aquacell_api = AquacellApi(session, brand) coordinator = AquacellCoordinator(hass, aquacell_api) diff --git a/homeassistant/components/aquacell/config_flow.py b/homeassistant/components/aquacell/config_flow.py index a9c749e9e2d..332cd16e749 100644 --- a/homeassistant/components/aquacell/config_flow.py +++ b/homeassistant/components/aquacell/config_flow.py @@ -7,18 +7,27 @@ import logging from typing import Any from aioaquacell import ApiException, AquacellApi, AuthenticationFailed +from aioaquacell.const import SUPPORTED_BRANDS, Brand import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, DOMAIN +from .const import ( + CONF_BRAND, + CONF_REFRESH_TOKEN, + CONF_REFRESH_TOKEN_CREATION_TIME, + DOMAIN, +) _LOGGER = logging.getLogger(__name__) DATA_SCHEMA = vol.Schema( { + vol.Required(CONF_BRAND, default=Brand.AQUACELL): vol.In( + {key: brand.name for key, brand in SUPPORTED_BRANDS.items()} + ), vol.Required(CONF_EMAIL): str, vol.Required(CONF_PASSWORD): str, } @@ -33,7 +42,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: - """Handle the initial step.""" + """Handle the cloud logon step.""" errors: dict[str, str] = {} if user_input is not None: await self.async_set_unique_id( @@ -42,7 +51,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): self._abort_if_unique_id_configured() session = async_get_clientsession(self.hass) - api = AquacellApi(session) + api = AquacellApi(session, user_input[CONF_BRAND]) try: refresh_token = await api.authenticate( user_input[CONF_EMAIL], user_input[CONF_PASSWORD] @@ -59,6 +68,7 @@ class AquaCellConfigFlow(ConfigFlow, domain=DOMAIN): title=user_input[CONF_EMAIL], data={ **user_input, + CONF_BRAND: user_input[CONF_BRAND], 
CONF_REFRESH_TOKEN: refresh_token, CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), }, diff --git a/homeassistant/components/aquacell/const.py b/homeassistant/components/aquacell/const.py index 96568d2286b..818c96fc53a 100644 --- a/homeassistant/components/aquacell/const.py +++ b/homeassistant/components/aquacell/const.py @@ -5,6 +5,7 @@ from datetime import timedelta DOMAIN = "aquacell" DATA_AQUACELL = "DATA_AQUACELL" +CONF_BRAND = "brand" CONF_REFRESH_TOKEN = "refresh_token" CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time" diff --git a/homeassistant/components/aquacell/manifest.json b/homeassistant/components/aquacell/manifest.json index de4a9986d6e..2d8b80f4488 100644 --- a/homeassistant/components/aquacell/manifest.json +++ b/homeassistant/components/aquacell/manifest.json @@ -1,6 +1,6 @@ { "domain": "aquacell", - "name": "Aquacell", + "name": "AquaCell", "codeowners": ["@Jordi1990"], "config_flow": true, "dependencies": ["http", "network"], diff --git a/homeassistant/components/aquacell/strings.json b/homeassistant/components/aquacell/strings.json index 32b6bba943a..53304d04804 100644 --- a/homeassistant/components/aquacell/strings.json +++ b/homeassistant/components/aquacell/strings.json @@ -2,8 +2,9 @@ "config": { "step": { "user": { - "description": "Fill in your Aquacell mobile app credentials", + "description": "Select the brand of the softener and fill in your softener mobile app credentials", "data": { + "brand": "Brand", "email": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" } diff --git a/homeassistant/components/aranet/icons.json b/homeassistant/components/aranet/icons.json index 6d6e9a83b03..8e2b66c0150 100644 --- a/homeassistant/components/aranet/icons.json +++ b/homeassistant/components/aranet/icons.json @@ -6,6 +6,9 @@ }, "radiation_rate": { "default": "mdi:radioactive" + }, + "radon_concentration": { + "default": "mdi:radioactive" } } } diff --git a/homeassistant/components/aranet/manifest.json b/homeassistant/components/aranet/manifest.json index 3f74d480c17..6cce7554dd1 100644 --- a/homeassistant/components/aranet/manifest.json +++ b/homeassistant/components/aranet/manifest.json @@ -19,5 +19,5 @@ "documentation": "https://www.home-assistant.io/integrations/aranet", "integration_type": "device", "iot_class": "local_push", - "requirements": ["aranet4==2.3.4"] + "requirements": ["aranet4==2.4.0"] } diff --git a/homeassistant/components/aranet/sensor.py b/homeassistant/components/aranet/sensor.py index c0fe194e87b..1dc4b9f956e 100644 --- a/homeassistant/components/aranet/sensor.py +++ b/homeassistant/components/aranet/sensor.py @@ -99,6 +99,13 @@ SENSOR_DESCRIPTIONS = { suggested_display_precision=4, scale=0.000001, ), + "radon_concentration": AranetSensorEntityDescription( + key="radon_concentration", + translation_key="radon_concentration", + name="Radon Concentration", + native_unit_of_measurement="Bq/m³", + state_class=SensorStateClass.MEASUREMENT, + ), "battery": AranetSensorEntityDescription( key="battery", name="Battery", diff --git a/homeassistant/components/arcam_fmj/__init__.py b/homeassistant/components/arcam_fmj/__init__.py index e1a2ee0a046..71639ed8388 100644 --- a/homeassistant/components/arcam_fmj/__init__.py +++ b/homeassistant/components/arcam_fmj/__init__.py @@ -11,12 +11,10 @@ from arcam.fmj.client import Client from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform from homeassistant.core import 
HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .const import ( DEFAULT_SCAN_INTERVAL, - DOMAIN, SIGNAL_CLIENT_DATA, SIGNAL_CLIENT_STARTED, SIGNAL_CLIENT_STOPPED, @@ -26,7 +24,6 @@ type ArcamFmjConfigEntry = ConfigEntry[Client] _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [Platform.MEDIA_PLAYER] diff --git a/homeassistant/components/arest/sensor.py b/homeassistant/components/arest/sensor.py index ab502fa275a..8c68c13018b 100644 --- a/homeassistant/components/arest/sensor.py +++ b/homeassistant/components/arest/sensor.py @@ -87,8 +87,6 @@ def setup_platform( if value_template is None: return lambda value: value - value_template.hass = hass - def _render(value): try: return value_template.async_render({"value": value}, parse_result=False) diff --git a/homeassistant/components/artsound/__init__.py b/homeassistant/components/artsound/__init__.py new file mode 100644 index 00000000000..149f06bc7c7 --- /dev/null +++ b/homeassistant/components/artsound/__init__.py @@ -0,0 +1 @@ +"""Virtual integration: ArtSound.""" diff --git a/homeassistant/components/artsound/manifest.json b/homeassistant/components/artsound/manifest.json new file mode 100644 index 00000000000..589ba862102 --- /dev/null +++ b/homeassistant/components/artsound/manifest.json @@ -0,0 +1,6 @@ +{ + "domain": "artsound", + "name": "ArtSound", + "integration_type": "virtual", + "supported_by": "linkplay" +} diff --git a/homeassistant/components/aseko_pool_live/config_flow.py b/homeassistant/components/aseko_pool_live/config_flow.py index cd2f0e4ac7f..ce6de3683d5 100644 --- a/homeassistant/components/aseko_pool_live/config_flow.py +++ b/homeassistant/components/aseko_pool_live/config_flow.py @@ -101,7 +101,7 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" @@ -109,10 +109,10 @@ class AsekoConfigFlow(ConfigFlow, domain=DOMAIN): self.context["entry_id"] ) - return await self.async_step_reauth_confirm(user_input) + return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( - self, user_input: Mapping | None = None + self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Dialog that informs the user that reauth is required.""" diff --git a/homeassistant/components/assist_pipeline/audio_enhancer.py b/homeassistant/components/assist_pipeline/audio_enhancer.py index c9c60f421b1..ff2b122187a 100644 --- a/homeassistant/components/assist_pipeline/audio_enhancer.py +++ b/homeassistant/components/assist_pipeline/audio_enhancer.py @@ -5,6 +5,7 @@ from dataclasses import dataclass import logging from pymicro_vad import MicroVad +from pyspeex_noise import AudioProcessor from .const import BYTES_PER_CHUNK @@ -41,8 +42,8 @@ class AudioEnhancer(ABC): """Enhance chunk of PCM audio @ 16Khz with 16-bit mono samples.""" -class MicroVadEnhancer(AudioEnhancer): - """Audio enhancer that just runs microVAD.""" +class MicroVadSpeexEnhancer(AudioEnhancer): + """Audio enhancer that runs microVAD and speex.""" def __init__( self, auto_gain: int, noise_suppression: int, is_vad_enabled: bool @@ -50,6 +51,24 @@ class MicroVadEnhancer(AudioEnhancer): """Initialize audio enhancer.""" super().__init__(auto_gain, noise_suppression, is_vad_enabled) + self.audio_processor: AudioProcessor | 
None = None + + # Scale from 0-4 + self.noise_suppression = noise_suppression * -15 + + # Scale from 0-31 + self.auto_gain = auto_gain * 300 + + if (self.auto_gain != 0) or (self.noise_suppression != 0): + self.audio_processor = AudioProcessor( + self.auto_gain, self.noise_suppression + ) + _LOGGER.debug( + "Initialized speex with auto_gain=%s, noise_suppression=%s", + self.auto_gain, + self.noise_suppression, + ) + self.vad: MicroVad | None = None self.threshold = 0.5 @@ -61,12 +80,17 @@ class MicroVadEnhancer(AudioEnhancer): """Enhance 10ms chunk of PCM audio @ 16Khz with 16-bit mono samples.""" is_speech: bool | None = None + assert len(audio) == BYTES_PER_CHUNK + if self.vad is not None: # Run VAD - assert len(audio) == BYTES_PER_CHUNK speech_prob = self.vad.Process10ms(audio) is_speech = speech_prob > self.threshold + if self.audio_processor is not None: + # Run noise suppression and auto gain + audio = self.audio_processor.Process10ms(audio).audio + return EnhancedAudioChunk( audio=audio, timestamp_ms=timestamp_ms, is_speech=is_speech ) diff --git a/homeassistant/components/assist_pipeline/manifest.json b/homeassistant/components/assist_pipeline/manifest.json index 00950b138fd..1b93ecd9eef 100644 --- a/homeassistant/components/assist_pipeline/manifest.json +++ b/homeassistant/components/assist_pipeline/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["pymicro-vad==1.0.1"] + "requirements": ["pymicro-vad==1.0.1", "pyspeex-noise==1.0.2"] } diff --git a/homeassistant/components/assist_pipeline/pipeline.py b/homeassistant/components/assist_pipeline/pipeline.py index 9fada934ca1..342f811c99b 100644 --- a/homeassistant/components/assist_pipeline/pipeline.py +++ b/homeassistant/components/assist_pipeline/pipeline.py @@ -49,7 +49,7 @@ from homeassistant.util import ( ) from homeassistant.util.limited_size_dict import LimitedSizeDict -from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadEnhancer +from .audio_enhancer import AudioEnhancer, EnhancedAudioChunk, MicroVadSpeexEnhancer from .const import ( BYTES_PER_CHUNK, CONF_DEBUG_RECORDING_DIR, @@ -589,7 +589,7 @@ class PipelineRun: # Initialize with audio settings if self.audio_settings.needs_processor and (self.audio_enhancer is None): # Default audio enhancer - self.audio_enhancer = MicroVadEnhancer( + self.audio_enhancer = MicroVadSpeexEnhancer( self.audio_settings.auto_gain_dbfs, self.audio_settings.noise_suppression_level, self.audio_settings.is_vad_enabled, diff --git a/homeassistant/components/assist_pipeline/vad.py b/homeassistant/components/assist_pipeline/vad.py index 49496e66159..4782d14dee4 100644 --- a/homeassistant/components/assist_pipeline/vad.py +++ b/homeassistant/components/assist_pipeline/vad.py @@ -6,13 +6,11 @@ from collections.abc import Callable, Iterable from dataclasses import dataclass from enum import StrEnum import logging -from typing import Final + +from .const import SAMPLE_CHANNELS, SAMPLE_RATE, SAMPLE_WIDTH _LOGGER = logging.getLogger(__name__) -_SAMPLE_RATE: Final = 16000 # Hz -_SAMPLE_WIDTH: Final = 2 # bytes - class VadSensitivity(StrEnum): """How quickly the end of a voice command is detected.""" @@ -26,12 +24,12 @@ class VadSensitivity(StrEnum): """Return seconds of silence for sensitivity level.""" sensitivity = VadSensitivity(sensitivity) if sensitivity == VadSensitivity.RELAXED: - return 2.0 + return 1.25 if sensitivity == VadSensitivity.AGGRESSIVE: - return 0.5 + return 0.25 - return 1.0 + return 
0.7 class AudioBuffer: @@ -80,7 +78,10 @@ class VoiceCommandSegmenter: speech_seconds: float = 0.3 """Seconds of speech before voice command has started.""" - silence_seconds: float = 1.0 + command_seconds: float = 1.0 + """Minimum number of seconds for a voice command.""" + + silence_seconds: float = 0.7 """Seconds of silence after voice command has ended.""" timeout_seconds: float = 15.0 @@ -92,9 +93,15 @@ class VoiceCommandSegmenter: in_command: bool = False """True if inside voice command.""" + timed_out: bool = False + """True a timeout occurred during voice command.""" + _speech_seconds_left: float = 0.0 """Seconds left before considering voice command as started.""" + _command_seconds_left: float = 0.0 + """Seconds left before voice command could stop.""" + _silence_seconds_left: float = 0.0 """Seconds left before considering voice command as stopped.""" @@ -111,6 +118,7 @@ class VoiceCommandSegmenter: def reset(self) -> None: """Reset all counters and state.""" self._speech_seconds_left = self.speech_seconds + self._command_seconds_left = self.command_seconds - self.speech_seconds self._silence_seconds_left = self.silence_seconds self._timeout_seconds_left = self.timeout_seconds self._reset_seconds_left = self.reset_seconds @@ -121,6 +129,9 @@ class VoiceCommandSegmenter: Returns False when command is done. """ + if self.timed_out: + self.timed_out = False + self._timeout_seconds_left -= chunk_seconds if self._timeout_seconds_left <= 0: _LOGGER.warning( @@ -128,6 +139,7 @@ class VoiceCommandSegmenter: self.timeout_seconds, ) self.reset() + self.timed_out = True return False if not self.in_command: @@ -137,6 +149,9 @@ class VoiceCommandSegmenter: if self._speech_seconds_left <= 0: # Inside voice command self.in_command = True + self._command_seconds_left = ( + self.command_seconds - self.speech_seconds + ) self._silence_seconds_left = self.silence_seconds _LOGGER.debug("Voice command started") else: @@ -149,7 +164,8 @@ class VoiceCommandSegmenter: # Silence in command self._reset_seconds_left = self.reset_seconds self._silence_seconds_left -= chunk_seconds - if self._silence_seconds_left <= 0: + self._command_seconds_left -= chunk_seconds + if (self._silence_seconds_left <= 0) and (self._command_seconds_left <= 0): # Command finished successfully self.reset() _LOGGER.debug("Voice command finished") @@ -158,6 +174,7 @@ class VoiceCommandSegmenter: # Speech in command. # Reset silence counter if enough speech. 
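# --- Editor's usage sketch (not part of this patch): driving VoiceCommandSegmenter
# --- with 10 ms chunks and made-up VAD results; process() returns False once the
# --- command has ended (or timed out, see the new `timed_out` flag above).
segmenter = VoiceCommandSegmenter()
segmenter.reset()
for is_speech in [True] * 40 + [False] * 80:  # 0.4 s of speech, then silence
    if not segmenter.process(0.01, is_speech):
        break  # voice command finished; check segmenter.timed_out for timeouts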
self._reset_seconds_left -= chunk_seconds + self._command_seconds_left -= chunk_seconds if self._reset_seconds_left <= 0: self._silence_seconds_left = self.silence_seconds self._reset_seconds_left = self.reset_seconds @@ -179,7 +196,9 @@ class VoiceCommandSegmenter: """ if vad_samples_per_chunk is None: # No chunking - chunk_seconds = (len(chunk) // _SAMPLE_WIDTH) / _SAMPLE_RATE + chunk_seconds = ( + len(chunk) // (SAMPLE_WIDTH * SAMPLE_CHANNELS) + ) / SAMPLE_RATE is_speech = vad_is_speech(chunk) return self.process(chunk_seconds, is_speech) @@ -187,8 +206,8 @@ class VoiceCommandSegmenter: raise ValueError("leftover_chunk_buffer is required when vad uses chunking") # With chunking - seconds_per_chunk = vad_samples_per_chunk / _SAMPLE_RATE - bytes_per_chunk = vad_samples_per_chunk * _SAMPLE_WIDTH + seconds_per_chunk = vad_samples_per_chunk / SAMPLE_RATE + bytes_per_chunk = vad_samples_per_chunk * (SAMPLE_WIDTH * SAMPLE_CHANNELS) for vad_chunk in chunk_samples(chunk, bytes_per_chunk, leftover_chunk_buffer): is_speech = vad_is_speech(vad_chunk) if not self.process(seconds_per_chunk, is_speech): diff --git a/homeassistant/components/asterisk_cdr/__init__.py b/homeassistant/components/asterisk_cdr/__init__.py deleted file mode 100644 index d681a392c56..00000000000 --- a/homeassistant/components/asterisk_cdr/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""The asterisk_cdr component.""" diff --git a/homeassistant/components/asterisk_cdr/mailbox.py b/homeassistant/components/asterisk_cdr/mailbox.py deleted file mode 100644 index fde4826fcee..00000000000 --- a/homeassistant/components/asterisk_cdr/mailbox.py +++ /dev/null @@ -1,70 +0,0 @@ -"""Support for the Asterisk CDR interface.""" - -from __future__ import annotations - -import datetime -import hashlib -from typing import Any - -from homeassistant.components.asterisk_mbox import ( - DOMAIN as ASTERISK_DOMAIN, - SIGNAL_CDR_UPDATE, -) -from homeassistant.components.mailbox import Mailbox -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -MAILBOX_NAME = "asterisk_cdr" - - -async def async_get_handler( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> Mailbox: - """Set up the Asterix CDR platform.""" - return AsteriskCDR(hass, MAILBOX_NAME) - - -class AsteriskCDR(Mailbox): - """Asterisk VM Call Data Record mailbox.""" - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize Asterisk CDR.""" - super().__init__(hass, name) - self.cdr: list[dict[str, Any]] = [] - async_dispatcher_connect(self.hass, SIGNAL_CDR_UPDATE, self._update_callback) - - @callback - def _update_callback(self, msg: list[dict[str, Any]]) -> Any: - """Update the message count in HA, if needed.""" - self._build_message() - self.async_update() - - def _build_message(self) -> None: - """Build message structure.""" - cdr: list[dict[str, Any]] = [] - for entry in self.hass.data[ASTERISK_DOMAIN].cdr: - timestamp = datetime.datetime.strptime( - entry["time"], "%Y-%m-%d %H:%M:%S" - ).timestamp() - info = { - "origtime": timestamp, - "callerid": entry["callerid"], - "duration": entry["duration"], - } - sha = hashlib.sha256(str(entry).encode("utf-8")).hexdigest() - msg = ( - f"Destination: {entry['dest']}\n" - f"Application: {entry['application']}\n " - f"Context: {entry['context']}" - ) - cdr.append({"info": info, "sha": sha, "text": msg}) - self.cdr = cdr - - async def 
async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - if not self.cdr: - self._build_message() - return self.cdr diff --git a/homeassistant/components/asterisk_cdr/manifest.json b/homeassistant/components/asterisk_cdr/manifest.json deleted file mode 100644 index 581b9dfb9a5..00000000000 --- a/homeassistant/components/asterisk_cdr/manifest.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "domain": "asterisk_cdr", - "name": "Asterisk Call Detail Records", - "codeowners": [], - "dependencies": ["asterisk_mbox"], - "documentation": "https://www.home-assistant.io/integrations/asterisk_cdr", - "iot_class": "local_polling" -} diff --git a/homeassistant/components/asterisk_mbox/__init__.py b/homeassistant/components/asterisk_mbox/__init__.py deleted file mode 100644 index 3e3913b7d42..00000000000 --- a/homeassistant/components/asterisk_mbox/__init__.py +++ /dev/null @@ -1,153 +0,0 @@ -"""Support for Asterisk Voicemail interface.""" - -import logging -from typing import Any, cast - -from asterisk_mbox import Client as asteriskClient -from asterisk_mbox.commands import ( - CMD_MESSAGE_CDR, - CMD_MESSAGE_CDR_AVAILABLE, - CMD_MESSAGE_LIST, -) -import voluptuous as vol - -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import discovery -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.dispatcher import async_dispatcher_send, dispatcher_connect -from homeassistant.helpers.issue_registry import IssueSeverity, create_issue -from homeassistant.helpers.typing import ConfigType - -_LOGGER = logging.getLogger(__name__) - -DOMAIN = "asterisk_mbox" - -SIGNAL_DISCOVER_PLATFORM = "asterisk_mbox.discover_platform" -SIGNAL_MESSAGE_REQUEST = "asterisk_mbox.message_request" -SIGNAL_MESSAGE_UPDATE = "asterisk_mbox.message_updated" -SIGNAL_CDR_UPDATE = "asterisk_mbox.message_updated" -SIGNAL_CDR_REQUEST = "asterisk_mbox.message_request" - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_PORT): cv.port, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - - -def setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up for the Asterisk Voicemail box.""" - conf: dict[str, Any] = config[DOMAIN] - - host: str = conf[CONF_HOST] - port: int = conf[CONF_PORT] - password: str = conf[CONF_PASSWORD] - - hass.data[DOMAIN] = AsteriskData(hass, host, port, password, config) - create_issue( - hass, - DOMAIN, - "deprecated_integration", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_integration", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Asterisk Voicemail", - "mailbox": "mailbox", - }, - ) - - return True - - -class AsteriskData: - """Store Asterisk mailbox data.""" - - def __init__( - self, - hass: HomeAssistant, - host: str, - port: int, - password: str, - config: dict[str, Any], - ) -> None: - """Init the Asterisk data object.""" - - self.hass = hass - self.config = config - self.messages: list[dict[str, Any]] | None = None - self.cdr: list[dict[str, Any]] | None = None - - dispatcher_connect(self.hass, SIGNAL_MESSAGE_REQUEST, self._request_messages) - dispatcher_connect(self.hass, SIGNAL_CDR_REQUEST, self._request_cdr) - dispatcher_connect(self.hass, SIGNAL_DISCOVER_PLATFORM, self._discover_platform) - # Only connect after 
signal connection to ensure we don't miss any - self.client = asteriskClient(host, port, password, self.handle_data) - - @callback - def _discover_platform(self, component: str) -> None: - _LOGGER.debug("Adding mailbox %s", component) - self.hass.async_create_task( - discovery.async_load_platform( - self.hass, "mailbox", component, {}, self.config - ) - ) - - @callback - def handle_data( - self, command: int, msg: list[dict[str, Any]] | dict[str, Any] - ) -> None: - """Handle changes to the mailbox.""" - - if command == CMD_MESSAGE_LIST: - msg = cast(list[dict[str, Any]], msg) - _LOGGER.debug("AsteriskVM sent updated message list: Len %d", len(msg)) - old_messages = self.messages - self.messages = sorted( - msg, key=lambda item: item["info"]["origtime"], reverse=True - ) - if not isinstance(old_messages, list): - async_dispatcher_send(self.hass, SIGNAL_DISCOVER_PLATFORM, DOMAIN) - async_dispatcher_send(self.hass, SIGNAL_MESSAGE_UPDATE, self.messages) - elif command == CMD_MESSAGE_CDR: - msg = cast(dict[str, Any], msg) - _LOGGER.debug( - "AsteriskVM sent updated CDR list: Len %d", len(msg.get("entries", [])) - ) - self.cdr = msg["entries"] - async_dispatcher_send(self.hass, SIGNAL_CDR_UPDATE, self.cdr) - elif command == CMD_MESSAGE_CDR_AVAILABLE: - if not isinstance(self.cdr, list): - _LOGGER.debug("AsteriskVM adding CDR platform") - self.cdr = [] - async_dispatcher_send( - self.hass, SIGNAL_DISCOVER_PLATFORM, "asterisk_cdr" - ) - async_dispatcher_send(self.hass, SIGNAL_CDR_REQUEST) - else: - _LOGGER.debug( - "AsteriskVM sent unknown message '%d' len: %d", command, len(msg) - ) - - @callback - def _request_messages(self) -> None: - """Handle changes to the mailbox.""" - _LOGGER.debug("Requesting message list") - self.client.messages() - - @callback - def _request_cdr(self) -> None: - """Handle changes to the CDR.""" - _LOGGER.debug("Requesting CDR list") - self.client.get_cdr() diff --git a/homeassistant/components/asterisk_mbox/mailbox.py b/homeassistant/components/asterisk_mbox/mailbox.py deleted file mode 100644 index 14d54596eea..00000000000 --- a/homeassistant/components/asterisk_mbox/mailbox.py +++ /dev/null @@ -1,86 +0,0 @@ -"""Support for the Asterisk Voicemail interface.""" - -from __future__ import annotations - -from functools import partial -import logging -from typing import Any - -from asterisk_mbox import ServerError - -from homeassistant.components.mailbox import CONTENT_TYPE_MPEG, Mailbox, StreamError -from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from . 
import DOMAIN as ASTERISK_DOMAIN, AsteriskData - -_LOGGER = logging.getLogger(__name__) - -SIGNAL_MESSAGE_REQUEST = "asterisk_mbox.message_request" -SIGNAL_MESSAGE_UPDATE = "asterisk_mbox.message_updated" - - -async def async_get_handler( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> Mailbox: - """Set up the Asterix VM platform.""" - return AsteriskMailbox(hass, ASTERISK_DOMAIN) - - -class AsteriskMailbox(Mailbox): - """Asterisk VM Sensor.""" - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize Asterisk mailbox.""" - super().__init__(hass, name) - async_dispatcher_connect( - self.hass, SIGNAL_MESSAGE_UPDATE, self._update_callback - ) - - @callback - def _update_callback(self, msg: str) -> None: - """Update the message count in HA, if needed.""" - self.async_update() - - @property - def media_type(self) -> str: - """Return the supported media type.""" - return CONTENT_TYPE_MPEG - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return True - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return True - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - - data: AsteriskData = self.hass.data[ASTERISK_DOMAIN] - client = data.client - try: - return await self.hass.async_add_executor_job( - partial(client.mp3, msgid, sync=True) - ) - except ServerError as err: - raise StreamError(err) from err - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - data: AsteriskData = self.hass.data[ASTERISK_DOMAIN] - return data.messages or [] - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - data: AsteriskData = self.hass.data[ASTERISK_DOMAIN] - client = data.client - _LOGGER.info("Deleting: %s", msgid) - await self.hass.async_add_executor_job(client.delete, msgid) - return True diff --git a/homeassistant/components/asterisk_mbox/manifest.json b/homeassistant/components/asterisk_mbox/manifest.json deleted file mode 100644 index 8348e40ba6b..00000000000 --- a/homeassistant/components/asterisk_mbox/manifest.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "domain": "asterisk_mbox", - "name": "Asterisk Voicemail", - "codeowners": [], - "documentation": "https://www.home-assistant.io/integrations/asterisk_mbox", - "iot_class": "local_push", - "loggers": ["asterisk_mbox"], - "requirements": ["asterisk_mbox==0.5.0"] -} diff --git a/homeassistant/components/asterisk_mbox/strings.json b/homeassistant/components/asterisk_mbox/strings.json deleted file mode 100644 index fb6c0637a64..00000000000 --- a/homeassistant/components/asterisk_mbox/strings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "issues": { - "deprecated_integration": { - "title": "The {integration_title} is being removed", - "description": "{integration_title} is being removed as the `{mailbox}` platform is being removed and {integration_title} supports no other platforms. Remove the `{domain}` configuration from your configuration.yaml file and restart Home Assistant to fix this issue." 
- } - } -} diff --git a/homeassistant/components/asuswrt/bridge.py b/homeassistant/components/asuswrt/bridge.py index b193787f500..4e928d63666 100644 --- a/homeassistant/components/asuswrt/bridge.py +++ b/homeassistant/components/asuswrt/bridge.py @@ -52,7 +52,7 @@ SENSORS_TYPE_LOAD_AVG = "sensors_load_avg" SENSORS_TYPE_RATES = "sensors_rates" SENSORS_TYPE_TEMPERATURES = "sensors_temperatures" -WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"]) +WrtDevice = namedtuple("WrtDevice", ["ip", "name", "connected_to"]) # noqa: PYI024 _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/august/__init__.py b/homeassistant/components/august/__init__.py index 53aa3cdffd8..434db46384b 100644 --- a/homeassistant/components/august/__init__.py +++ b/homeassistant/components/august/__init__.py @@ -6,15 +6,16 @@ from pathlib import Path from typing import cast from aiohttp import ClientResponseError +from yalexs.const import Brand from yalexs.exceptions import AugustApiAIOHTTPError from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation from yalexs.manager.gateway import Config as YaleXSConfig from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_STOP -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, issue_registry as ir from .const import DOMAIN, PLATFORMS from .data import AugustData @@ -24,7 +25,27 @@ from .util import async_create_august_clientsession type AugustConfigEntry = ConfigEntry[AugustData] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +@callback +def _async_create_yale_brand_migration_issue( + hass: HomeAssistant, entry: AugustConfigEntry +) -> None: + """Create an issue for a brand migration.""" + ir.async_create_issue( + hass, + DOMAIN, + "yale_brand_migration", + breaks_in_ha_version="2024.9", + learn_more_url="https://www.home-assistant.io/integrations/yale", + translation_key="yale_brand_migration", + is_fixable=False, + severity=ir.IssueSeverity.CRITICAL, + translation_placeholders={ + "migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale" + }, + ) + + +async def async_setup_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool: """Set up August from a config entry.""" session = async_create_august_clientsession(hass) august_gateway = AugustGateway(Path(hass.config.config_dir), session) @@ -40,6 +61,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True +async def async_remove_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> None: + """Remove an August config entry.""" + ir.async_delete_issue(hass, DOMAIN, "yale_brand_migration") + + async def async_unload_entry(hass: HomeAssistant, entry: AugustConfigEntry) -> bool: """Unload a config entry.""" return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) @@ -51,6 +77,8 @@ async def async_setup_august( """Set up the August component.""" config = cast(YaleXSConfig, entry.data) await august_gateway.async_setup(config) + if august_gateway.api.brand == Brand.YALE_HOME: + _async_create_yale_brand_migration_issue(hass, entry) await august_gateway.async_authenticate() await august_gateway.async_refresh_access_token_if_needed() data = entry.runtime_data = 
AugustData(hass, august_gateway) diff --git a/homeassistant/components/august/binary_sensor.py b/homeassistant/components/august/binary_sensor.py index 6a56692bcd6..fb877252010 100644 --- a/homeassistant/components/august/binary_sensor.py +++ b/homeassistant/components/august/binary_sensor.py @@ -109,12 +109,11 @@ async def async_setup_entry( for description in SENSOR_TYPES_DOORBELL ) - for doorbell in data.doorbells: - entities.extend( - AugustDoorbellBinarySensor(data, doorbell, description) - for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL - ) - + entities.extend( + AugustDoorbellBinarySensor(data, doorbell, description) + for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) async_add_entities(entities) diff --git a/homeassistant/components/august/button.py b/homeassistant/components/august/button.py index 406475db601..79f2b67888a 100644 --- a/homeassistant/components/august/button.py +++ b/homeassistant/components/august/button.py @@ -5,7 +5,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import AugustConfigEntry -from .entity import AugustEntityMixin +from .entity import AugustEntity async def async_setup_entry( @@ -18,7 +18,7 @@ async def async_setup_entry( async_add_entities(AugustWakeLockButton(data, lock, "wake") for lock in data.locks) -class AugustWakeLockButton(AugustEntityMixin, ButtonEntity): +class AugustWakeLockButton(AugustEntity, ButtonEntity): """Representation of an August lock wake button.""" _attr_translation_key = "wake" diff --git a/homeassistant/components/august/camera.py b/homeassistant/components/august/camera.py index 4e569e2a91e..f4398455256 100644 --- a/homeassistant/components/august/camera.py +++ b/homeassistant/components/august/camera.py @@ -16,7 +16,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import AugustConfigEntry, AugustData from .const import DEFAULT_NAME, DEFAULT_TIMEOUT -from .entity import AugustEntityMixin +from .entity import AugustEntity _LOGGER = logging.getLogger(__name__) @@ -38,7 +38,7 @@ async def async_setup_entry( ) -class AugustCamera(AugustEntityMixin, Camera): +class AugustCamera(AugustEntity, Camera): """An implementation of an August security camera.""" _attr_translation_key = "camera" diff --git a/homeassistant/components/august/config_flow.py b/homeassistant/components/august/config_flow.py index 2a1a20a9dc4..58c3549fe4d 100644 --- a/homeassistant/components/august/config_flow.py +++ b/homeassistant/components/august/config_flow.py @@ -9,7 +9,7 @@ from typing import Any import aiohttp import voluptuous as vol from yalexs.authenticator_common import ValidationResult -from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND +from yalexs.const import BRANDS_WITHOUT_OAUTH, DEFAULT_BRAND, Brand from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -28,6 +28,12 @@ from .const import ( from .gateway import AugustGateway from .util import async_create_august_clientsession +# The Yale Home Brand is not supported by the August integration +# anymore and should migrate to the Yale integration +AVAILABLE_BRANDS = BRANDS_WITHOUT_OAUTH.copy() +del AVAILABLE_BRANDS[Brand.YALE_HOME] + + _LOGGER = logging.getLogger(__name__) @@ -118,7 +124,7 @@ class AugustConfigFlow(ConfigFlow, domain=DOMAIN): vol.Required( CONF_BRAND, default=self._user_auth_details.get(CONF_BRAND, DEFAULT_BRAND), - ): vol.In(BRANDS_WITHOUT_OAUTH), + ): vol.In(AVAILABLE_BRANDS), vol.Required( CONF_LOGIN_METHOD, default=self._user_auth_details.get( diff --git a/homeassistant/components/august/entity.py b/homeassistant/components/august/entity.py index babf5c587fb..28c722354ba 100644 --- a/homeassistant/components/august/entity.py +++ b/homeassistant/components/august/entity.py @@ -20,7 +20,7 @@ from .const import MANUFACTURER DEVICE_TYPES = ["keypad", "lock", "camera", "doorbell", "door", "bell"] -class AugustEntityMixin(Entity): +class AugustEntity(Entity): """Base implementation for August device.""" _attr_should_poll = False @@ -87,7 +87,7 @@ class AugustEntityMixin(Entity): self._update_from_data() -class AugustDescriptionEntity(AugustEntityMixin): +class AugustDescriptionEntity(AugustEntity): """An August entity with a description.""" def __init__( diff --git a/homeassistant/components/august/event.py b/homeassistant/components/august/event.py index b65f72272a3..49b14630337 100644 --- a/homeassistant/components/august/event.py +++ b/homeassistant/components/august/event.py @@ -63,22 +63,17 @@ async def async_setup_entry( ) -> None: """Set up the august event platform.""" data = config_entry.runtime_data - entities: list[AugustEventEntity] = [] - - for lock in data.locks: - detail = data.get_device_detail(lock.device_id) - if detail.doorbell: - entities.extend( - AugustEventEntity(data, lock, description) - for description in TYPES_DOORBELL - ) - - for doorbell in data.doorbells: - entities.extend( - AugustEventEntity(data, doorbell, description) - for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL - ) - + entities: list[AugustEventEntity] = [ + AugustEventEntity(data, lock, description) + for description in TYPES_DOORBELL + for lock in data.locks + if (detail := data.get_device_detail(lock.device_id)) and detail.doorbell + ] + entities.extend( + AugustEventEntity(data, doorbell, 
description) + for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) async_add_entities(entities) @@ -86,7 +81,6 @@ class AugustEventEntity(AugustDescriptionEntity, EventEntity): """An august event entity.""" entity_description: AugustEventEntityDescription - _attr_has_entity_name = True _last_activity: Activity | None = None @callback diff --git a/homeassistant/components/august/lock.py b/homeassistant/components/august/lock.py index 5382c710229..fe5d90371ad 100644 --- a/homeassistant/components/august/lock.py +++ b/homeassistant/components/august/lock.py @@ -19,7 +19,7 @@ from homeassistant.helpers.restore_state import RestoreEntity import homeassistant.util.dt as dt_util from . import AugustConfigEntry, AugustData -from .entity import AugustEntityMixin +from .entity import AugustEntity _LOGGER = logging.getLogger(__name__) @@ -36,7 +36,7 @@ async def async_setup_entry( async_add_entities(AugustLock(data, lock) for lock in data.locks) -class AugustLock(AugustEntityMixin, RestoreEntity, LockEntity): +class AugustLock(AugustEntity, RestoreEntity, LockEntity): """Representation of an August lock.""" _attr_name = None diff --git a/homeassistant/components/august/manifest.json b/homeassistant/components/august/manifest.json index e0739aadff0..6635a95f1cf 100644 --- a/homeassistant/components/august/manifest.json +++ b/homeassistant/components/august/manifest.json @@ -4,10 +4,6 @@ "codeowners": ["@bdraco"], "config_flow": true, "dhcp": [ - { - "hostname": "yale-connect-plus", - "macaddress": "00177A*" - }, { "hostname": "connect", "macaddress": "D86162*" @@ -28,5 +24,5 @@ "documentation": "https://www.home-assistant.io/integrations/august", "iot_class": "cloud_push", "loggers": ["pubnub", "yalexs"], - "requirements": ["yalexs==8.4.1", "yalexs-ble==2.4.3"] + "requirements": ["yalexs==8.6.3", "yalexs-ble==2.4.3"] } diff --git a/homeassistant/components/august/sensor.py b/homeassistant/components/august/sensor.py index 7a4c1a92358..b7c0d618492 100644 --- a/homeassistant/components/august/sensor.py +++ b/homeassistant/components/august/sensor.py @@ -4,7 +4,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from typing import Any, Generic, TypeVar, cast +from typing import Any, cast from yalexs.activity import ActivityType, LockOperationActivity from yalexs.doorbell import Doorbell @@ -42,7 +42,7 @@ from .const import ( OPERATION_METHOD_REMOTE, OPERATION_METHOD_TAG, ) -from .entity import AugustDescriptionEntity, AugustEntityMixin +from .entity import AugustDescriptionEntity, AugustEntity def _retrieve_device_battery_state(detail: LockDetail) -> int: @@ -55,14 +55,13 @@ def _retrieve_linked_keypad_battery_state(detail: KeypadDetail) -> int | None: return detail.battery_percentage -_T = TypeVar("_T", LockDetail, KeypadDetail) - - @dataclass(frozen=True, kw_only=True) -class AugustSensorEntityDescription(SensorEntityDescription, Generic[_T]): +class AugustSensorEntityDescription[T: LockDetail | KeypadDetail]( + SensorEntityDescription +): """Mixin for required keys.""" - value_fn: Callable[[_T], int | None] + value_fn: Callable[[T], int | None] SENSOR_TYPE_DEVICE_BATTERY = AugustSensorEntityDescription[LockDetail]( @@ -114,7 +113,7 @@ async def async_setup_entry( async_add_entities(entities) -class AugustOperatorSensor(AugustEntityMixin, RestoreSensor): +class AugustOperatorSensor(AugustEntity, RestoreSensor): """Representation of an August lock operation sensor.""" _attr_translation_key = "operator" 
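
The sensor hunk above swaps the old `TypeVar("_T", LockDetail, KeypadDetail)` / `Generic[_T]` pattern for a class-scoped type parameter (PEP 695, available on Python 3.12). Below is a minimal, self-contained sketch of that pattern; `FakeLockDetail` and `FakeKeypadDetail` are hypothetical stand-ins for the yalexs detail models, so this illustrates the syntax only, not the real integration code.

# Sketch of a generic, frozen entity-description dataclass using PEP 695
# class-scoped type parameters. The Fake* classes are invented for the example.
from collections.abc import Callable
from dataclasses import dataclass


class FakeLockDetail:
    battery_level: int | None = 87


class FakeKeypadDetail:
    battery_percentage: int | None = 54


@dataclass(frozen=True, kw_only=True)
class BatteryDescription[T: FakeLockDetail | FakeKeypadDetail]:
    """Describe how to read a battery value from a detail object of type T."""

    key: str
    value_fn: Callable[[T], int | None]


LOCK_BATTERY = BatteryDescription[FakeLockDetail](
    key="battery",
    value_fn=lambda detail: detail.battery_level,
)

print(LOCK_BATTERY.value_fn(FakeLockDetail()))  # prints 87

The parameterized alias `BatteryDescription[FakeLockDetail]` is still callable at runtime, so descriptions can be declared per detail type, much like the diff does with `AugustSensorEntityDescription[LockDetail]`.
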
@@ -198,10 +197,12 @@ class AugustOperatorSensor(AugustEntityMixin, RestoreSensor): self._operated_autorelock = last_attrs[ATTR_OPERATION_AUTORELOCK] -class AugustBatterySensor(AugustDescriptionEntity, SensorEntity, Generic[_T]): +class AugustBatterySensor[T: LockDetail | KeypadDetail]( + AugustDescriptionEntity, SensorEntity +): """Representation of an August sensor.""" - entity_description: AugustSensorEntityDescription[_T] + entity_description: AugustSensorEntityDescription[T] _attr_device_class = SensorDeviceClass.BATTERY _attr_native_unit_of_measurement = PERCENTAGE diff --git a/homeassistant/components/august/strings.json b/homeassistant/components/august/strings.json index 772a8dca479..589a494590b 100644 --- a/homeassistant/components/august/strings.json +++ b/homeassistant/components/august/strings.json @@ -1,4 +1,10 @@ { + "issues": { + "yale_brand_migration": { + "title": "Yale Home has a new integration", + "description": "Add the [Yale integration]({migrate_url}), and remove the August integration as soon as possible to avoid an interruption in service. The Yale Home brand will stop working with the August integration soon and will be removed in a future release." + } + }, "config": { "error": { "unhandled": "Unhandled error: {error}", diff --git a/homeassistant/components/august/util.py b/homeassistant/components/august/util.py index 47482100794..5449d048613 100644 --- a/homeassistant/components/august/util.py +++ b/homeassistant/components/august/util.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import datetime, timedelta from functools import partial -import socket import aiohttp from yalexs.activity import ACTION_DOORBELL_CALL_MISSED, Activity, ActivityType @@ -26,14 +25,7 @@ def async_create_august_clientsession(hass: HomeAssistant) -> aiohttp.ClientSess # Create an aiohttp session instead of using the default one since the # default one is likely to trigger august's WAF if another integration # is also using Cloudflare - # - # The family is set to AF_INET because IPv6 keeps coming up as an issue - # see https://github.com/home-assistant/core/issues/97146 - # - # When https://github.com/aio-libs/aiohttp/issues/4451 is implemented - # we can allow IPv6 again - # - return aiohttp_client.async_create_clientsession(hass, family=socket.AF_INET) + return aiohttp_client.async_create_clientsession(hass) def retrieve_time_based_activity( @@ -71,16 +63,11 @@ def _activity_time_based(latest: Activity) -> Activity | None: """Get the latest state of the sensor.""" start = latest.activity_start_time end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION - if start <= _native_datetime() <= end: + if start <= datetime.now() <= end: return latest return None -def _native_datetime() -> datetime: - """Return time in the format august uses without timezone.""" - return datetime.now() - - def retrieve_online_state( data: AugustData, detail: DoorbellDetail | LockDetail ) -> bool: diff --git a/homeassistant/components/aurora_abb_powerone/config_flow.py b/homeassistant/components/aurora_abb_powerone/config_flow.py index f0093c62631..47c349ab48a 100644 --- a/homeassistant/components/aurora_abb_powerone/config_flow.py +++ b/homeassistant/components/aurora_abb_powerone/config_flow.py @@ -75,11 +75,10 @@ class AuroraABBConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialise the config flow.""" - self.config = None self._com_ports_list: list[str] | None = None - self._default_com_port = None + 
self._default_com_port: str | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/aussie_broadband/config_flow.py b/homeassistant/components/aussie_broadband/config_flow.py index 587c7df2b36..65507d57e8b 100644 --- a/homeassistant/components/aussie_broadband/config_flow.py +++ b/homeassistant/components/aussie_broadband/config_flow.py @@ -22,11 +22,11 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" self.data: dict = {} self.options: dict = {CONF_SERVICES: []} - self.services: list[dict[str]] = [] + self.services: list[dict[str, Any]] = [] self.client: AussieBB | None = None self._reauth_username: str | None = None @@ -99,15 +99,11 @@ class AussieBroadbandConfigFlow(ConfigFlow, domain=DOMAIN): } if not (errors := await self.async_auth(data)): - entry = await self.async_set_unique_id(self._reauth_username.lower()) - if entry: - self.hass.config_entries.async_update_entry( - entry, - data=data, - ) - await self.hass.config_entries.async_reload(entry.entry_id) - return self.async_abort(reason="reauth_successful") - return self.async_create_entry(title=self._reauth_username, data=data) + entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + assert entry + return self.async_update_reload_and_abort(entry, data=data) return self.async_show_form( step_id="reauth_confirm", diff --git a/homeassistant/components/automation/__init__.py b/homeassistant/components/automation/__init__.py index f2ef404ab34..8ab9c478bc4 100644 --- a/homeassistant/components/automation/__init__.py +++ b/homeassistant/components/automation/__init__.py @@ -322,8 +322,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: }, trigger_service_handler, ) - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") component.async_register_entity_service( SERVICE_TURN_OFF, {vol.Optional(CONF_STOP_ACTIONS, default=DEFAULT_STOP_ACTIONS): cv.boolean}, diff --git a/homeassistant/components/automation/blueprints/motion_light.yaml b/homeassistant/components/automation/blueprints/motion_light.yaml index 8f5d3f957f9..ad9c6f0286b 100644 --- a/homeassistant/components/automation/blueprints/motion_light.yaml +++ b/homeassistant/components/automation/blueprints/motion_light.yaml @@ -10,8 +10,10 @@ blueprint: selector: entity: filter: - device_class: motion - domain: binary_sensor + - device_class: occupancy + domain: binary_sensor + - device_class: motion + domain: binary_sensor light_target: name: Light selector: diff --git a/homeassistant/components/automation/icons.json b/homeassistant/components/automation/icons.json index 9b68825ffd1..f1e0f26ef65 100644 --- a/homeassistant/components/automation/icons.json +++ b/homeassistant/components/automation/icons.json @@ -9,10 +9,20 @@ } }, "services": { - "turn_on": "mdi:robot", - "turn_off": "mdi:robot-off", - "toggle": "mdi:robot", - "trigger": "mdi:robot", - "reload": "mdi:reload" + "turn_on": { + "service": "mdi:robot" + }, + "turn_off": { + "service": "mdi:robot-off" + }, + "toggle": { + "service": "mdi:robot" + }, + "trigger": { + "service": "mdi:robot" + }, + "reload": { + "service": "mdi:reload" + } } } 
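
The automation hunk above (and the blink, bond and bring platforms later in this change) registers entity services with a `None` schema instead of an empty `{}` when the service takes no fields. A rough sketch of that registration from a platform's setup follows; the `DemoButton` entity, the `wake` service name and the config-entry wiring are invented for illustration and are not the actual August or Blink code.

# Sketch of registering a schema-less entity service from a platform's
# async_setup_entry. Entity and service names here are hypothetical.
from homeassistant.components.button import ButtonEntity
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_platform
from homeassistant.helpers.entity_platform import AddEntitiesCallback


class DemoButton(ButtonEntity):
    """Hypothetical button exposing a schema-less entity service."""

    _attr_name = "Demo wake button"

    async def async_press(self) -> None:
        """Handle the button press."""

    async def async_wake(self) -> None:
        """Handle the `wake` entity service call."""


async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddEntitiesCallback,
) -> None:
    """Set up the hypothetical platform and its entity service."""
    async_add_entities([DemoButton()])

    platform = entity_platform.async_get_current_platform()
    # Passing None rather than {} tells Home Assistant the service accepts no
    # service data fields beyond the usual entity targeting.
    platform.async_register_entity_service("wake", None, "async_wake")

This mirrors the automation `toggle` and `turn_on` services above, which carry no fields of their own.
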
diff --git a/homeassistant/components/aws/config_flow.py b/homeassistant/components/aws/config_flow.py index 8c80b0d487d..3175e6bc56c 100644 --- a/homeassistant/components/aws/config_flow.py +++ b/homeassistant/components/aws/config_flow.py @@ -1,6 +1,5 @@ """Config flow for AWS component.""" -from collections.abc import Mapping from typing import Any from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -13,11 +12,9 @@ class AWSFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, user_input: Mapping[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="configuration.yaml", data=user_input) + return self.async_create_entry(title="configuration.yaml", data=import_data) diff --git a/homeassistant/components/azure_devops/coordinator.py b/homeassistant/components/azure_devops/coordinator.py index 22dbe32c103..2460a9bbfce 100644 --- a/homeassistant/components/azure_devops/coordinator.py +++ b/homeassistant/components/azure_devops/coordinator.py @@ -6,7 +6,7 @@ import logging from typing import Final from aioazuredevops.client import DevOpsClient -from aioazuredevops.models.builds import Build +from aioazuredevops.models.build import Build from aioazuredevops.models.core import Project import aiohttp diff --git a/homeassistant/components/azure_devops/data.py b/homeassistant/components/azure_devops/data.py index 6d9e2069b67..c2da38ccc09 100644 --- a/homeassistant/components/azure_devops/data.py +++ b/homeassistant/components/azure_devops/data.py @@ -2,7 +2,7 @@ from dataclasses import dataclass -from aioazuredevops.models.builds import Build +from aioazuredevops.models.build import Build from aioazuredevops.models.core import Project diff --git a/homeassistant/components/azure_devops/manifest.json b/homeassistant/components/azure_devops/manifest.json index 48ceee5f9d8..5086e44ab0f 100644 --- a/homeassistant/components/azure_devops/manifest.json +++ b/homeassistant/components/azure_devops/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/azure_devops", "iot_class": "cloud_polling", "loggers": ["aioazuredevops"], - "requirements": ["aioazuredevops==2.1.1"] + "requirements": ["aioazuredevops==2.2.1"] } diff --git a/homeassistant/components/azure_devops/sensor.py b/homeassistant/components/azure_devops/sensor.py index 029d3d875dc..7b7af1dd666 100644 --- a/homeassistant/components/azure_devops/sensor.py +++ b/homeassistant/components/azure_devops/sensor.py @@ -8,7 +8,7 @@ from datetime import datetime import logging from typing import Any -from aioazuredevops.models.builds import Build +from aioazuredevops.models.build import Build from homeassistant.components.sensor import ( SensorDeviceClass, diff --git a/homeassistant/components/azure_event_hub/config_flow.py b/homeassistant/components/azure_event_hub/config_flow.py index 264daa683bc..046851e6926 100644 --- a/homeassistant/components/azure_event_hub/config_flow.py +++ b/homeassistant/components/azure_event_hub/config_flow.py @@ -154,17 +154,15 @@ class AEHConfigFlow(ConfigFlow, domain=DOMAIN): options=self._options, ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from 
configuration.yaml.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - if CONF_SEND_INTERVAL in import_config: - self._options[CONF_SEND_INTERVAL] = import_config.pop(CONF_SEND_INTERVAL) - if CONF_MAX_DELAY in import_config: - self._options[CONF_MAX_DELAY] = import_config.pop(CONF_MAX_DELAY) - self._data = import_config + if CONF_SEND_INTERVAL in import_data: + self._options[CONF_SEND_INTERVAL] = import_data.pop(CONF_SEND_INTERVAL) + if CONF_MAX_DELAY in import_data: + self._options[CONF_MAX_DELAY] = import_data.pop(CONF_MAX_DELAY) + self._data = import_data errors = await validate_data(self._data) if errors: return self.async_abort(reason=errors["base"]) diff --git a/homeassistant/components/backup/icons.json b/homeassistant/components/backup/icons.json index cba4fb22831..bd5ff4a81ee 100644 --- a/homeassistant/components/backup/icons.json +++ b/homeassistant/components/backup/icons.json @@ -1,5 +1,7 @@ { "services": { - "create": "mdi:cloud-upload" + "create": { + "service": "mdi:cloud-upload" + } } } diff --git a/homeassistant/components/bang_olufsen/const.py b/homeassistant/components/bang_olufsen/const.py index 657bedcf4d7..748b4baf621 100644 --- a/homeassistant/components/bang_olufsen/const.py +++ b/homeassistant/components/bang_olufsen/const.py @@ -68,20 +68,20 @@ class BangOlufsenModel(StrEnum): class WebsocketNotification(StrEnum): """Enum for WebSocket notification types.""" - PLAYBACK_ERROR: Final[str] = "playback_error" - PLAYBACK_METADATA: Final[str] = "playback_metadata" - PLAYBACK_PROGRESS: Final[str] = "playback_progress" - PLAYBACK_SOURCE: Final[str] = "playback_source" - PLAYBACK_STATE: Final[str] = "playback_state" - SOFTWARE_UPDATE_STATE: Final[str] = "software_update_state" - SOURCE_CHANGE: Final[str] = "source_change" - VOLUME: Final[str] = "volume" + PLAYBACK_ERROR = "playback_error" + PLAYBACK_METADATA = "playback_metadata" + PLAYBACK_PROGRESS = "playback_progress" + PLAYBACK_SOURCE = "playback_source" + PLAYBACK_STATE = "playback_state" + SOFTWARE_UPDATE_STATE = "software_update_state" + SOURCE_CHANGE = "source_change" + VOLUME = "volume" # Sub-notifications - NOTIFICATION: Final[str] = "notification" - REMOTE_MENU_CHANGED: Final[str] = "remoteMenuChanged" + NOTIFICATION = "notification" + REMOTE_MENU_CHANGED = "remoteMenuChanged" - ALL: Final[str] = "all" + ALL = "all" DOMAIN: Final[str] = "bang_olufsen" diff --git a/homeassistant/components/bang_olufsen/media_player.py b/homeassistant/components/bang_olufsen/media_player.py index 5f8b7638125..8bc97858d0d 100644 --- a/homeassistant/components/bang_olufsen/media_player.py +++ b/homeassistant/components/bang_olufsen/media_player.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable import json import logging from typing import Any, cast @@ -137,65 +138,25 @@ class BangOlufsenMediaPlayer(BangOlufsenEntity, MediaPlayerEntity): """Turn on the dispatchers.""" await self._initialize() - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{CONNECTION_STATUS}", - self._async_update_connection_state, - ) - ) + signal_handlers: dict[str, Callable] = { + CONNECTION_STATUS: self._async_update_connection_state, + WebsocketNotification.PLAYBACK_ERROR: self._async_update_playback_error, + WebsocketNotification.PLAYBACK_METADATA: self._async_update_playback_metadata, + WebsocketNotification.PLAYBACK_PROGRESS: self._async_update_playback_progress, + WebsocketNotification.PLAYBACK_STATE: 
self._async_update_playback_state, + WebsocketNotification.REMOTE_MENU_CHANGED: self._async_update_sources, + WebsocketNotification.SOURCE_CHANGE: self._async_update_source_change, + WebsocketNotification.VOLUME: self._async_update_volume, + } - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.PLAYBACK_ERROR}", - self._async_update_playback_error, + for signal, signal_handler in signal_handlers.items(): + self.async_on_remove( + async_dispatcher_connect( + self.hass, + f"{self._unique_id}_{signal}", + signal_handler, + ) ) - ) - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.PLAYBACK_METADATA}", - self._async_update_playback_metadata, - ) - ) - - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.PLAYBACK_PROGRESS}", - self._async_update_playback_progress, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.PLAYBACK_STATE}", - self._async_update_playback_state, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}", - self._async_update_sources, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.SOURCE_CHANGE}", - self._async_update_source_change, - ) - ) - self.async_on_remove( - async_dispatcher_connect( - self.hass, - f"{self._unique_id}_{WebsocketNotification.VOLUME}", - self._async_update_volume, - ) - ) async def _initialize(self) -> None: """Initialize connection dependent variables.""" diff --git a/homeassistant/components/bang_olufsen/util.py b/homeassistant/components/bang_olufsen/util.py index 617eb4b1df6..c54b3059ee4 100644 --- a/homeassistant/components/bang_olufsen/util.py +++ b/homeassistant/components/bang_olufsen/util.py @@ -9,11 +9,8 @@ from homeassistant.helpers.device_registry import DeviceEntry from .const import DOMAIN -def get_device(hass: HomeAssistant | None, unique_id: str) -> DeviceEntry | None: +def get_device(hass: HomeAssistant, unique_id: str) -> DeviceEntry: """Get the device.""" - if not isinstance(hass, HomeAssistant): - return None - device_registry = dr.async_get(hass) device = device_registry.async_get_device({(DOMAIN, unique_id)}) assert device diff --git a/homeassistant/components/bang_olufsen/websocket.py b/homeassistant/components/bang_olufsen/websocket.py index 7415d0f362b..0c0a5096d91 100644 --- a/homeassistant/components/bang_olufsen/websocket.py +++ b/homeassistant/components/bang_olufsen/websocket.py @@ -20,6 +20,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.util.enum import try_parse_enum from .const import ( BANG_OLUFSEN_WEBSOCKET_EVENT, @@ -92,12 +93,14 @@ class BangOlufsenWebsocket(BangOlufsenBase): self, notification: WebsocketNotificationTag ) -> None: """Send notification dispatch.""" - if notification.value: - if WebsocketNotification.REMOTE_MENU_CHANGED in notification.value: - async_dispatcher_send( - self.hass, - f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}", - ) + # Try to match the notification type with available WebsocketNotification members + notification_type = try_parse_enum(WebsocketNotification, notification.value) + + 
if notification_type is WebsocketNotification.REMOTE_MENU_CHANGED: + async_dispatcher_send( + self.hass, + f"{self._unique_id}_{WebsocketNotification.REMOTE_MENU_CHANGED}", + ) def on_playback_error_notification(self, notification: PlaybackError) -> None: """Send playback_error dispatch.""" @@ -154,11 +157,6 @@ class BangOlufsenWebsocket(BangOlufsenBase): software_status = await self._client.get_softwareupdate_status() # Update the HA device if the sw version does not match - if not self._device: - self._device = get_device(self.hass, self._unique_id) - - assert self._device - if software_status.software_version != self._device.sw_version: device_registry = dr.async_get(self.hass) @@ -169,10 +167,6 @@ class BangOlufsenWebsocket(BangOlufsenBase): def on_all_notifications_raw(self, notification: dict) -> None: """Receive all notifications.""" - if not self._device: - self._device = get_device(self.hass, self._unique_id) - - assert self._device # Add the device_id and serial_number to the notification notification["device_id"] = self._device.id diff --git a/homeassistant/components/bayesian/icons.json b/homeassistant/components/bayesian/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/bayesian/icons.json +++ b/homeassistant/components/bayesian/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/bbox/device_tracker.py b/homeassistant/components/bbox/device_tracker.py index 6ced2c73c9a..7157c47830c 100644 --- a/homeassistant/components/bbox/device_tracker.py +++ b/homeassistant/components/bbox/device_tracker.py @@ -39,7 +39,7 @@ def get_scanner(hass: HomeAssistant, config: ConfigType) -> BboxDeviceScanner | return scanner if scanner.success_init else None -Device = namedtuple("Device", ["mac", "name", "ip", "last_update"]) +Device = namedtuple("Device", ["mac", "name", "ip", "last_update"]) # noqa: PYI024 class BboxDeviceScanner(DeviceScanner): diff --git a/homeassistant/components/binary_sensor/strings.json b/homeassistant/components/binary_sensor/strings.json index 162cf139a1d..b86a6374f28 100644 --- a/homeassistant/components/binary_sensor/strings.json +++ b/homeassistant/components/binary_sensor/strings.json @@ -243,8 +243,8 @@ "power": { "name": "Power", "state": { - "off": "[%key:component::binary_sensor::entity_component::gas::state::off%]", - "on": "[%key:component::binary_sensor::entity_component::gas::state::on%]" + "off": "[%key:common::state::off%]", + "on": "[%key:common::state::on%]" } }, "presence": { diff --git a/homeassistant/components/blackbird/icons.json b/homeassistant/components/blackbird/icons.json index f080fb5f857..815a45ba174 100644 --- a/homeassistant/components/blackbird/icons.json +++ b/homeassistant/components/blackbird/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_all_zones": "mdi:home-sound-in" + "set_all_zones": { + "service": "mdi:home-sound-in" + } } } diff --git a/homeassistant/components/blebox/config_flow.py b/homeassistant/components/blebox/config_flow.py index 1f04f06a05a..2221e35a81f 100644 --- a/homeassistant/components/blebox/config_flow.py +++ b/homeassistant/components/blebox/config_flow.py @@ -35,15 +35,11 @@ from .const import ( _LOGGER = logging.getLogger(__name__) -def host_port(data): - """Return a list with host and port.""" - return (data[CONF_HOST], data[CONF_PORT]) - - def create_schema(previous_input=None): """Create a schema with given values as default.""" if previous_input is not None: - host, port = 
host_port(previous_input) + host = previous_input[CONF_HOST] + port = previous_input[CONF_PORT] else: host = DEFAULT_HOST port = DEFAULT_PORT @@ -70,9 +66,9 @@ class BleBoxConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the BleBox config flow.""" - self.device_config = {} + self.device_config: dict[str, Any] = {} def handle_step_exception( self, step, exception, schema, host, port, message_id, log_fn @@ -146,7 +142,9 @@ class BleBoxConfigFlow(ConfigFlow, domain=DOMAIN): }, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle initial user-triggered config step.""" hass = self.hass schema = create_schema(user_input) @@ -159,14 +157,14 @@ class BleBoxConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={}, ) - addr = host_port(user_input) + host = user_input[CONF_HOST] + port = user_input[CONF_PORT] username = user_input.get(CONF_USERNAME) password = user_input.get(CONF_PASSWORD) for entry in self._async_current_entries(): - if addr == host_port(entry.data): - host, port = addr + if host == entry.data[CONF_HOST] and port == entry.data[CONF_PORT]: return self.async_abort( reason=ADDRESS_ALREADY_CONFIGURED, description_placeholders={"address": f"{host}:{port}"}, @@ -174,27 +172,35 @@ class BleBoxConfigFlow(ConfigFlow, domain=DOMAIN): websession = get_maybe_authenticated_session(hass, password, username) - api_host = ApiHost(*addr, DEFAULT_SETUP_TIMEOUT, websession, hass.loop, _LOGGER) + api_host = ApiHost( + host, port, DEFAULT_SETUP_TIMEOUT, websession, hass.loop, _LOGGER + ) try: product = await Box.async_from_host(api_host) except UnsupportedBoxVersion as ex: return self.handle_step_exception( - "user", ex, schema, *addr, UNSUPPORTED_VERSION, _LOGGER.debug + "user", + ex, + schema, + host, + port, + UNSUPPORTED_VERSION, + _LOGGER.debug, ) except UnauthorizedRequest as ex: return self.handle_step_exception( - "user", ex, schema, *addr, CANNOT_CONNECT, _LOGGER.error + "user", ex, schema, host, port, CANNOT_CONNECT, _LOGGER.error ) except Error as ex: return self.handle_step_exception( - "user", ex, schema, *addr, CANNOT_CONNECT, _LOGGER.warning + "user", ex, schema, host, port, CANNOT_CONNECT, _LOGGER.warning ) except RuntimeError as ex: return self.handle_step_exception( - "user", ex, schema, *addr, UNKNOWN, _LOGGER.error + "user", ex, schema, host, port, UNKNOWN, _LOGGER.error ) # Check if configured but IP changed since diff --git a/homeassistant/components/blebox/cover.py b/homeassistant/components/blebox/cover.py index f9e974991f5..bb75c88ca2a 100644 --- a/homeassistant/components/blebox/cover.py +++ b/homeassistant/components/blebox/cover.py @@ -6,6 +6,7 @@ from typing import Any from blebox_uniapi.box import Box import blebox_uniapi.cover +from blebox_uniapi.cover import BleboxCoverState from homeassistant.components.cover import ( ATTR_POSITION, @@ -28,19 +29,18 @@ BLEBOX_TO_COVER_DEVICE_CLASSES = { "shutter": CoverDeviceClass.SHUTTER, } - BLEBOX_TO_HASS_COVER_STATES = { None: None, - 0: STATE_CLOSING, # moving down - 1: STATE_OPENING, # moving up - 2: STATE_OPEN, # manually stopped - 3: STATE_CLOSED, # lower limit - 4: STATE_OPEN, # upper limit / open - # gateController - 5: STATE_OPEN, # overload - 6: STATE_OPEN, # motor failure - # 7 is not used - 8: STATE_OPEN, # safety stop + # all blebox covers + BleboxCoverState.MOVING_DOWN: STATE_CLOSING, + BleboxCoverState.MOVING_UP: STATE_OPENING, + 
BleboxCoverState.MANUALLY_STOPPED: STATE_OPEN, + BleboxCoverState.LOWER_LIMIT_REACHED: STATE_CLOSED, + BleboxCoverState.UPPER_LIMIT_REACHED: STATE_OPEN, + # extra states of gateController product + BleboxCoverState.OVERLOAD: STATE_OPEN, + BleboxCoverState.MOTOR_FAILURE: STATE_OPEN, + BleboxCoverState.SAFETY_STOP: STATE_OPEN, } @@ -64,14 +64,20 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity): """Initialize a BleBox cover feature.""" super().__init__(feature) self._attr_device_class = BLEBOX_TO_COVER_DEVICE_CLASSES[feature.device_class] - position = CoverEntityFeature.SET_POSITION if feature.is_slider else 0 - stop = CoverEntityFeature.STOP if feature.has_stop else 0 self._attr_supported_features = ( - position | stop | CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE ) + if feature.is_slider: + self._attr_supported_features |= CoverEntityFeature.SET_POSITION + + if feature.has_stop: + self._attr_supported_features |= CoverEntityFeature.STOP + if feature.has_tilt: - self._attr_supported_features = ( - self._attr_supported_features | CoverEntityFeature.SET_TILT_POSITION + self._attr_supported_features |= ( + CoverEntityFeature.SET_TILT_POSITION + | CoverEntityFeature.OPEN_TILT + | CoverEntityFeature.CLOSE_TILT ) @property @@ -105,16 +111,24 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity): return self._is_state(STATE_CLOSED) async def async_open_cover(self, **kwargs: Any) -> None: - """Open the cover position.""" + """Fully open the cover position.""" await self._feature.async_open() async def async_close_cover(self, **kwargs: Any) -> None: - """Close the cover position.""" + """Fully close the cover position.""" await self._feature.async_close() + async def async_open_cover_tilt(self, **kwargs: Any) -> None: + """Fully open the cover tilt.""" + await self._feature.async_set_tilt_position(0) + + async def async_close_cover_tilt(self, **kwargs: Any) -> None: + """Fully close the cover tilt.""" + # note: values are reversed + await self._feature.async_set_tilt_position(100) + async def async_set_cover_position(self, **kwargs: Any) -> None: """Set the cover position.""" - position = kwargs[ATTR_POSITION] await self._feature.async_set_position(100 - position) @@ -124,7 +138,6 @@ class BleBoxCoverEntity(BleBoxEntity[blebox_uniapi.cover.Cover], CoverEntity): async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: """Set the tilt position.""" - position = kwargs[ATTR_TILT_POSITION] await self._feature.async_set_tilt_position(100 - position) diff --git a/homeassistant/components/blebox/sensor.py b/homeassistant/components/blebox/sensor.py index 2642bfd0139..fa11f6d6680 100644 --- a/homeassistant/components/blebox/sensor.py +++ b/homeassistant/components/blebox/sensor.py @@ -14,13 +14,13 @@ from homeassistant.const import ( CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, UnitOfApparentPower, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, UnitOfSpeed, UnitOfTemperature, ) @@ -85,7 +85,7 @@ SENSOR_TYPES = ( SensorEntityDescription( key="reactivePower", device_class=SensorDeviceClass.POWER, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, ), SensorEntityDescription( key="activePower", diff --git a/homeassistant/components/blink/camera.py b/homeassistant/components/blink/camera.py 
index fcf19adf71e..cce9100a0bd 100644 --- a/homeassistant/components/blink/camera.py +++ b/homeassistant/components/blink/camera.py @@ -51,8 +51,8 @@ async def async_setup_entry( async_add_entities(entities) platform = entity_platform.async_get_current_platform() - platform.async_register_entity_service(SERVICE_RECORD, {}, "record") - platform.async_register_entity_service(SERVICE_TRIGGER, {}, "trigger_camera") + platform.async_register_entity_service(SERVICE_RECORD, None, "record") + platform.async_register_entity_service(SERVICE_TRIGGER, None, "trigger_camera") platform.async_register_entity_service( SERVICE_SAVE_RECENT_CLIPS, {vol.Required(CONF_FILE_PATH): cv.string}, diff --git a/homeassistant/components/blink/icons.json b/homeassistant/components/blink/icons.json index 615a3c4c6dc..bea67b25f6d 100644 --- a/homeassistant/components/blink/icons.json +++ b/homeassistant/components/blink/icons.json @@ -12,10 +12,20 @@ } }, "services": { - "record": "mdi:video-box", - "trigger_camera": "mdi:image-refresh", - "save_video": "mdi:file-video", - "save_recent_clips": "mdi:file-video", - "send_pin": "mdi:two-factor-authentication" + "record": { + "service": "mdi:video-box" + }, + "trigger_camera": { + "service": "mdi:image-refresh" + }, + "save_video": { + "service": "mdi:file-video" + }, + "save_recent_clips": { + "service": "mdi:file-video" + }, + "send_pin": { + "service": "mdi:two-factor-authentication" + } } } diff --git a/homeassistant/components/bluesound/icons.json b/homeassistant/components/bluesound/icons.json index 8c886f12dfd..2c5e95291c1 100644 --- a/homeassistant/components/bluesound/icons.json +++ b/homeassistant/components/bluesound/icons.json @@ -1,8 +1,16 @@ { "services": { - "join": "mdi:link-variant", - "unjoin": "mdi:link-variant-off", - "set_sleep_timer": "mdi:sleep", - "clear_sleep_timer": "mdi:sleep-off" + "join": { + "service": "mdi:link-variant" + }, + "unjoin": { + "service": "mdi:link-variant-off" + }, + "set_sleep_timer": { + "service": "mdi:sleep" + }, + "clear_sleep_timer": { + "service": "mdi:sleep-off" + } } } diff --git a/homeassistant/components/bluesound/media_player.py b/homeassistant/components/bluesound/media_player.py index 92f47977ee5..1ed53d7bfc5 100644 --- a/homeassistant/components/bluesound/media_player.py +++ b/homeassistant/components/bluesound/media_player.py @@ -309,7 +309,7 @@ class BluesoundPlayer(MediaPlayerEntity): return True - async def _start_poll_command(self): + async def _poll_loop(self): """Loop which polls the status of the player.""" while True: try: @@ -335,7 +335,7 @@ class BluesoundPlayer(MediaPlayerEntity): await super().async_added_to_hass() self._polling_task = self.hass.async_create_background_task( - self._start_poll_command(), + self._poll_loop(), name=f"bluesound.polling_{self.host}:{self.port}", ) @@ -345,7 +345,9 @@ class BluesoundPlayer(MediaPlayerEntity): assert self._polling_task is not None if self._polling_task.cancel(): - await self._polling_task + # the sleeps in _poll_loop will raise CancelledError + with suppress(CancelledError): + await self._polling_task self.hass.data[DATA_BLUESOUND].remove(self) diff --git a/homeassistant/components/bluetooth/manifest.json b/homeassistant/components/bluetooth/manifest.json index 657209cdba0..0d17be70e0b 100644 --- a/homeassistant/components/bluetooth/manifest.json +++ b/homeassistant/components/bluetooth/manifest.json @@ -18,8 +18,8 @@ "bleak-retry-connector==3.5.0", "bluetooth-adapters==0.19.4", "bluetooth-auto-recovery==1.4.2", - "bluetooth-data-tools==1.19.4", - 
"dbus-fast==2.22.1", - "habluetooth==3.1.3" + "bluetooth-data-tools==1.20.0", + "dbus-fast==2.24.0", + "habluetooth==3.4.0" ] } diff --git a/homeassistant/components/bluetooth_tracker/icons.json b/homeassistant/components/bluetooth_tracker/icons.json index 650bf0b6d19..217f1240893 100644 --- a/homeassistant/components/bluetooth_tracker/icons.json +++ b/homeassistant/components/bluetooth_tracker/icons.json @@ -1,5 +1,7 @@ { "services": { - "update": "mdi:update" + "update": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/bmw_connected_drive/__init__.py b/homeassistant/components/bmw_connected_drive/__init__.py index 495359ca314..9e43cfc4187 100644 --- a/homeassistant/components/bmw_connected_drive/__init__.py +++ b/homeassistant/components/bmw_connected_drive/__init__.py @@ -23,8 +23,6 @@ from .coordinator import BMWDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - SERVICE_SCHEMA = vol.Schema( vol.Any( {vol.Required(ATTR_VIN): cv.string}, diff --git a/homeassistant/components/bmw_connected_drive/coordinator.py b/homeassistant/components/bmw_connected_drive/coordinator.py index 6e0ed2ab670..992e7dea6b2 100644 --- a/homeassistant/components/bmw_connected_drive/coordinator.py +++ b/homeassistant/components/bmw_connected_drive/coordinator.py @@ -15,6 +15,7 @@ from homeassistant.const import CONF_PASSWORD, CONF_REGION, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.util.ssl import get_default_context from .const import CONF_GCID, CONF_READ_ONLY, CONF_REFRESH_TOKEN, DOMAIN, SCAN_INTERVALS @@ -33,6 +34,7 @@ class BMWDataUpdateCoordinator(DataUpdateCoordinator[None]): entry.data[CONF_PASSWORD], get_region_from_name(entry.data[CONF_REGION]), observer_position=GPSPosition(hass.config.latitude, hass.config.longitude), + verify=get_default_context(), ) self.read_only = entry.options[CONF_READ_ONLY] self._entry = entry diff --git a/homeassistant/components/bmw_connected_drive/manifest.json b/homeassistant/components/bmw_connected_drive/manifest.json index 304973b816f..6bc9027ac19 100644 --- a/homeassistant/components/bmw_connected_drive/manifest.json +++ b/homeassistant/components/bmw_connected_drive/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_polling", "loggers": ["bimmer_connected"], "quality_scale": "platinum", - "requirements": ["bimmer-connected[china]==0.16.1"] + "requirements": ["bimmer-connected[china]==0.16.3"] } diff --git a/homeassistant/components/bmw_connected_drive/strings.json b/homeassistant/components/bmw_connected_drive/strings.json index 8121ab6f65f..c59900ef4f9 100644 --- a/homeassistant/components/bmw_connected_drive/strings.json +++ b/homeassistant/components/bmw_connected_drive/strings.json @@ -148,7 +148,8 @@ "cooling": "Cooling", "heating": "Heating", "inactive": "Inactive", - "standby": "Standby" + "standby": "Standby", + "ventilation": "Ventilation" } }, "front_left_current_pressure": { diff --git a/homeassistant/components/bond/icons.json b/homeassistant/components/bond/icons.json index 35743d20e65..48b351b1c76 100644 --- a/homeassistant/components/bond/icons.json +++ b/homeassistant/components/bond/icons.json @@ -96,12 +96,26 @@ } }, "services": { - "set_fan_speed_tracked_state": "mdi:fan", - "set_switch_power_tracked_state": "mdi:toggle-switch-variant", - "set_light_power_tracked_state": 
"mdi:lightbulb", - "set_light_brightness_tracked_state": "mdi:lightbulb-on", - "start_increasing_brightness": "mdi:brightness-7", - "start_decreasing_brightness": "mdi:brightness-1", - "stop": "mdi:stop" + "set_fan_speed_tracked_state": { + "service": "mdi:fan" + }, + "set_switch_power_tracked_state": { + "service": "mdi:toggle-switch-variant" + }, + "set_light_power_tracked_state": { + "service": "mdi:lightbulb" + }, + "set_light_brightness_tracked_state": { + "service": "mdi:lightbulb-on" + }, + "start_increasing_brightness": { + "service": "mdi:brightness-7" + }, + "start_decreasing_brightness": { + "service": "mdi:brightness-1" + }, + "stop": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/bond/light.py b/homeassistant/components/bond/light.py index 3bff7fe754e..c3cf23e4fad 100644 --- a/homeassistant/components/bond/light.py +++ b/homeassistant/components/bond/light.py @@ -52,7 +52,7 @@ async def async_setup_entry( for service in ENTITY_SERVICES: platform.async_register_entity_service( service, - {}, + None, f"async_{service}", ) diff --git a/homeassistant/components/bring/icons.json b/homeassistant/components/bring/icons.json index 1c6c3bdeca0..6b79fab3c94 100644 --- a/homeassistant/components/bring/icons.json +++ b/homeassistant/components/bring/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "send_message": "mdi:cellphone-message" + "send_message": { + "service": "mdi:cellphone-message" + } } } diff --git a/homeassistant/components/bring/todo.py b/homeassistant/components/bring/todo.py index 001466bc1fe..4fb90860899 100644 --- a/homeassistant/components/bring/todo.py +++ b/homeassistant/components/bring/todo.py @@ -22,7 +22,6 @@ from homeassistant.components.todo import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_platform -from homeassistant.helpers.config_validation import make_entity_service_schema from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -62,14 +61,12 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_PUSH_NOTIFICATION, - make_entity_service_schema( - { - vol.Required(ATTR_NOTIFICATION_TYPE): vol.All( - vol.Upper, cv.enum(BringNotificationType) - ), - vol.Optional(ATTR_ITEM_NAME): cv.string, - } - ), + { + vol.Required(ATTR_NOTIFICATION_TYPE): vol.All( + vol.Upper, cv.enum(BringNotificationType) + ), + vol.Optional(ATTR_ITEM_NAME): cv.string, + }, "async_send_message", ) diff --git a/homeassistant/components/broadlink/config_flow.py b/homeassistant/components/broadlink/config_flow.py index 89d540a27fc..5d7acfd8b84 100644 --- a/homeassistant/components/broadlink/config_flow.py +++ b/homeassistant/components/broadlink/config_flow.py @@ -5,7 +5,7 @@ import errno from functools import partial import logging import socket -from typing import Any +from typing import TYPE_CHECKING, Any import broadlink as blk from broadlink.exceptions import ( @@ -39,9 +39,11 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize the Broadlink flow.""" - self.device = None + self.device: blk.Device | None = None - async def async_set_device(self, device, raise_on_progress=True): + async def async_set_device( + self, device: blk.Device, raise_on_progress: bool = True + ) -> None: """Define a device for the config flow.""" if device.type not in DEVICE_TYPES: 
_LOGGER.error( @@ -90,7 +92,9 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): await self.async_set_device(device) return await self.async_step_auth() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} @@ -127,6 +131,8 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): ) return await self.async_step_auth() + if TYPE_CHECKING: + assert self.device if device.mac == self.device.mac: await self.async_set_device(device, raise_on_progress=False) return await self.async_step_auth() @@ -308,10 +314,10 @@ class BroadlinkFlowHandler(ConfigFlow, domain=DOMAIN): step_id="finish", data_schema=vol.Schema(data_schema), errors=errors ) - async def async_step_import(self, import_info): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a device.""" - self._async_abort_entries_match({CONF_HOST: import_info[CONF_HOST]}) - return await self.async_step_user(import_info) + self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) + return await self.async_step_user(import_data) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/brother/manifest.json b/homeassistant/components/brother/manifest.json index 5caaeb2f1a1..d9c8e36aa1d 100644 --- a/homeassistant/components/brother/manifest.json +++ b/homeassistant/components/brother/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_polling", "loggers": ["brother", "pyasn1", "pysmi", "pysnmp"], "quality_scale": "platinum", - "requirements": ["brother==4.2.0"], + "requirements": ["brother==4.3.0"], "zeroconf": [ { "type": "_printer._tcp.local.", diff --git a/homeassistant/components/browser/icons.json b/homeassistant/components/browser/icons.json index 7c971009fd7..680aaf14b86 100644 --- a/homeassistant/components/browser/icons.json +++ b/homeassistant/components/browser/icons.json @@ -1,5 +1,7 @@ { "services": { - "browse_url": "mdi:web" + "browse_url": { + "service": "mdi:web" + } } } diff --git a/homeassistant/components/bsblan/__init__.py b/homeassistant/components/bsblan/__init__.py index 9a471329ba9..5ce90db5043 100644 --- a/homeassistant/components/bsblan/__init__.py +++ b/homeassistant/components/bsblan/__init__.py @@ -2,7 +2,7 @@ import dataclasses -from bsblan import BSBLAN, Device, Info, StaticState +from bsblan import BSBLAN, BSBLANConfig, Device, Info, StaticState from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -22,7 +22,7 @@ PLATFORMS = [Platform.CLIMATE] @dataclasses.dataclass -class HomeAssistantBSBLANData: +class BSBLanData: """BSBLan data stored in the Home Assistant data object.""" coordinator: BSBLanUpdateCoordinator @@ -35,23 +35,29 @@ class HomeAssistantBSBLANData: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up BSB-Lan from a config entry.""" - session = async_get_clientsession(hass) - bsblan = BSBLAN( - entry.data[CONF_HOST], + # create config using BSBLANConfig + config = BSBLANConfig( + host=entry.data[CONF_HOST], passkey=entry.data[CONF_PASSKEY], port=entry.data[CONF_PORT], username=entry.data.get(CONF_USERNAME), password=entry.data.get(CONF_PASSWORD), - session=session, ) + # create BSBLAN client + session = async_get_clientsession(hass) + bsblan = BSBLAN(config, session) + + # Create and perform first refresh of the coordinator coordinator = BSBLanUpdateCoordinator(hass, entry, 
bsblan) await coordinator.async_config_entry_first_refresh() + # Fetch all required data concurrently device = await bsblan.device() info = await bsblan.info() static = await bsblan.static_values() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = HomeAssistantBSBLANData( + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = BSBLanData( client=bsblan, coordinator=coordinator, device=device, diff --git a/homeassistant/components/bsblan/climate.py b/homeassistant/components/bsblan/climate.py index 1b300e1e738..ae7116143df 100644 --- a/homeassistant/components/bsblan/climate.py +++ b/homeassistant/components/bsblan/climate.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from bsblan import BSBLAN, BSBLANError, Device, Info, State, StaticState +from bsblan import BSBLANError from homeassistant.components.climate import ( ATTR_HVAC_MODE, @@ -21,15 +21,11 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.device_registry import format_mac from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.update_coordinator import ( - CoordinatorEntity, - DataUpdateCoordinator, -) from homeassistant.util.enum import try_parse_enum -from . import HomeAssistantBSBLANData +from . import BSBLanData from .const import ATTR_TARGET_TEMPERATURE, DOMAIN -from .entity import BSBLANEntity +from .entity import BSBLanEntity PARALLEL_UPDATES = 1 @@ -51,24 +47,17 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up BSBLAN device based on a config entry.""" - data: HomeAssistantBSBLANData = hass.data[DOMAIN][entry.entry_id] + data: BSBLanData = hass.data[DOMAIN][entry.entry_id] async_add_entities( [ BSBLANClimate( - data.coordinator, - data.client, - data.device, - data.info, - data.static, - entry, + data, ) ] ) -class BSBLANClimate( - BSBLANEntity, CoordinatorEntity[DataUpdateCoordinator[State]], ClimateEntity -): +class BSBLANClimate(BSBLanEntity, ClimateEntity): """Defines a BSBLAN climate device.""" _attr_has_entity_name = True @@ -80,30 +69,22 @@ class BSBLANClimate( | ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - _attr_preset_modes = PRESET_MODES - # Determine hvac modes + _attr_preset_modes = PRESET_MODES _attr_hvac_modes = HVAC_MODES _enable_turn_on_off_backwards_compatibility = False def __init__( self, - coordinator: DataUpdateCoordinator[State], - client: BSBLAN, - device: Device, - info: Info, - static: StaticState, - entry: ConfigEntry, + data: BSBLanData, ) -> None: """Initialize BSBLAN climate device.""" - super().__init__(client, device, info, static, entry) - CoordinatorEntity.__init__(self, coordinator) - self._attr_unique_id = f"{format_mac(device.MAC)}-climate" + super().__init__(data.coordinator, data) + self._attr_unique_id = f"{format_mac(data.device.MAC)}-climate" - self._attr_min_temp = float(static.min_temp.value) - self._attr_max_temp = float(static.max_temp.value) - # check if self.coordinator.data.current_temperature.unit is "°C" or "&deg;C" - if self.coordinator.data.current_temperature.unit in ("°C", "&deg;C"): + self._attr_min_temp = float(data.static.min_temp.value) + self._attr_max_temp = float(data.static.max_temp.value) + if data.static.min_temp.unit in ("°C", "&deg;C"): self._attr_temperature_unit = UnitOfTemperature.CELSIUS else: self._attr_temperature_unit = UnitOfTemperature.FAHRENHEIT @@ -111,30 +92,30 @@ class BSBLANClimate( @property def 
current_temperature(self) -> float | None: """Return the current temperature.""" - if self.coordinator.data.current_temperature.value == "---": + if self.coordinator.data.state.current_temperature.value == "---": # device returns no current temperature return None - return float(self.coordinator.data.current_temperature.value) + return float(self.coordinator.data.state.current_temperature.value) @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" - return float(self.coordinator.data.target_temperature.value) + return float(self.coordinator.data.state.target_temperature.value) @property def hvac_mode(self) -> HVACMode | None: """Return hvac operation ie. heat, cool mode.""" - if self.coordinator.data.hvac_mode.value == PRESET_ECO: + if self.coordinator.data.state.hvac_mode.value == PRESET_ECO: return HVACMode.AUTO - return try_parse_enum(HVACMode, self.coordinator.data.hvac_mode.value) + return try_parse_enum(HVACMode, self.coordinator.data.state.hvac_mode.value) @property def preset_mode(self) -> str | None: """Return the current preset mode.""" if ( self.hvac_mode == HVACMode.AUTO - and self.coordinator.data.hvac_mode.value == PRESET_ECO + and self.coordinator.data.state.hvac_mode.value == PRESET_ECO ): return PRESET_ECO return PRESET_NONE @@ -173,7 +154,7 @@ class BSBLANClimate( else: data[ATTR_HVAC_MODE] = kwargs[ATTR_PRESET_MODE] try: - await self.client.thermostat(**data) + await self.coordinator.client.thermostat(**data) except BSBLANError as err: raise HomeAssistantError( "An error occurred while updating the BSBLAN device", diff --git a/homeassistant/components/bsblan/config_flow.py b/homeassistant/components/bsblan/config_flow.py index 9732f0a77a9..a1d7d6d403a 100644 --- a/homeassistant/components/bsblan/config_flow.py +++ b/homeassistant/components/bsblan/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from typing import Any -from bsblan import BSBLAN, BSBLANError +from bsblan import BSBLAN, BSBLANConfig, BSBLANError import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult @@ -80,15 +80,15 @@ class BSBLANFlowHandler(ConfigFlow, domain=DOMAIN): async def _get_bsblan_info(self, raise_on_progress: bool = True) -> None: """Get device information from an BSBLAN device.""" - session = async_get_clientsession(self.hass) - bsblan = BSBLAN( + config = BSBLANConfig( host=self.host, - username=self.username, - password=self.password, passkey=self.passkey, port=self.port, - session=session, + username=self.username, + password=self.password, ) + session = async_get_clientsession(self.hass) + bsblan = BSBLAN(config, session) device = await bsblan.device() self.mac = device.MAC diff --git a/homeassistant/components/bsblan/const.py b/homeassistant/components/bsblan/const.py index 5bca20cb4d4..25d9dec865b 100644 --- a/homeassistant/components/bsblan/const.py +++ b/homeassistant/components/bsblan/const.py @@ -21,6 +21,4 @@ ATTR_OUTSIDE_TEMPERATURE: Final = "outside_temperature" CONF_PASSKEY: Final = "passkey" -CONF_DEVICE_IDENT: Final = "RVS21.831F/127" - DEFAULT_PORT: Final = 80 diff --git a/homeassistant/components/bsblan/coordinator.py b/homeassistant/components/bsblan/coordinator.py index 864daacc562..3320c0f7500 100644 --- a/homeassistant/components/bsblan/coordinator.py +++ b/homeassistant/components/bsblan/coordinator.py @@ -1,12 +1,10 @@ """DataUpdateCoordinator for the BSB-Lan integration.""" -from __future__ import annotations - +from dataclasses import dataclass from datetime 
import timedelta from random import randint -from bsblan import BSBLAN, BSBLANConnectionError -from bsblan.models import State +from bsblan import BSBLAN, BSBLANConnectionError, State from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -16,7 +14,14 @@ from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, Upda from .const import DOMAIN, LOGGER, SCAN_INTERVAL -class BSBLanUpdateCoordinator(DataUpdateCoordinator[State]): +@dataclass +class BSBLanCoordinatorData: + """BSBLan data stored in the Home Assistant data object.""" + + state: State + + +class BSBLanUpdateCoordinator(DataUpdateCoordinator[BSBLanCoordinatorData]): """The BSB-Lan update coordinator.""" config_entry: ConfigEntry @@ -28,30 +33,32 @@ class BSBLanUpdateCoordinator(DataUpdateCoordinator[State]): client: BSBLAN, ) -> None: """Initialize the BSB-Lan coordinator.""" - - self.client = client - super().__init__( hass, - LOGGER, + logger=LOGGER, name=f"{DOMAIN}_{config_entry.data[CONF_HOST]}", - # use the default scan interval and add a random number of seconds to avoid timeouts when - # the BSB-Lan device is already/still busy retrieving data, - # e.g. for MQTT or internal logging. - update_interval=SCAN_INTERVAL + timedelta(seconds=randint(1, 8)), + update_interval=self._get_update_interval(), ) + self.client = client - async def _async_update_data(self) -> State: - """Get state from BSB-Lan device.""" + def _get_update_interval(self) -> timedelta: + """Get the update interval with a random offset. - # use the default scan interval and add a random number of seconds to avoid timeouts when - # the BSB-Lan device is already/still busy retrieving data, e.g. for MQTT or internal logging. - self.update_interval = SCAN_INTERVAL + timedelta(seconds=randint(1, 8)) + Use the default scan interval and add a random number of seconds to avoid timeouts when + the BSB-Lan device is already/still busy retrieving data, + e.g. for MQTT or internal logging. + """ + return SCAN_INTERVAL + timedelta(seconds=randint(1, 8)) + async def _async_update_data(self) -> BSBLanCoordinatorData: + """Get state and sensor data from BSB-Lan device.""" try: - return await self.client.state() + state = await self.client.state() except BSBLANConnectionError as err: + host = self.config_entry.data[CONF_HOST] if self.config_entry else "unknown" raise UpdateFailed( - f"Error while establishing connection with " - f"BSB-Lan device at {self.config_entry.data[CONF_HOST]}" + f"Error while establishing connection with BSB-Lan device at {host}" ) from err + + self.update_interval = self._get_update_interval() + return BSBLanCoordinatorData(state=state) diff --git a/homeassistant/components/bsblan/diagnostics.py b/homeassistant/components/bsblan/diagnostics.py index 0bceed0bf23..3b42d47e1d3 100644 --- a/homeassistant/components/bsblan/diagnostics.py +++ b/homeassistant/components/bsblan/diagnostics.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -from . import HomeAssistantBSBLANData +from . 
import BSBLanData from .const import DOMAIN @@ -15,9 +15,10 @@ async def async_get_config_entry_diagnostics( hass: HomeAssistant, entry: ConfigEntry ) -> dict[str, Any]: """Return diagnostics for a config entry.""" - data: HomeAssistantBSBLANData = hass.data[DOMAIN][entry.entry_id] + data: BSBLanData = hass.data[DOMAIN][entry.entry_id] + return { - "info": data.info.dict(), - "device": data.device.dict(), - "state": data.coordinator.data.dict(), + "info": data.info.to_dict(), + "device": data.device.to_dict(), + "state": data.coordinator.data.state.to_dict(), } diff --git a/homeassistant/components/bsblan/entity.py b/homeassistant/components/bsblan/entity.py index a69c4d2217e..0c507938794 100644 --- a/homeassistant/components/bsblan/entity.py +++ b/homeassistant/components/bsblan/entity.py @@ -1,41 +1,35 @@ -"""Base entity for the BSBLAN integration.""" +"""BSBLan base entity.""" from __future__ import annotations -from bsblan import BSBLAN, Device, Info, StaticState - -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_HOST from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, DeviceInfo, format_mac, ) -from homeassistant.helpers.entity import Entity +from homeassistant.helpers.update_coordinator import CoordinatorEntity +from . import BSBLanData from .const import DOMAIN +from .coordinator import BSBLanUpdateCoordinator -class BSBLANEntity(Entity): - """Defines a BSBLAN entity.""" +class BSBLanEntity(CoordinatorEntity[BSBLanUpdateCoordinator]): + """Defines a base BSBLan entity.""" - def __init__( - self, - client: BSBLAN, - device: Device, - info: Info, - static: StaticState, - entry: ConfigEntry, - ) -> None: - """Initialize an BSBLAN entity.""" - self.client = client + _attr_has_entity_name = True + def __init__(self, coordinator: BSBLanUpdateCoordinator, data: BSBLanData) -> None: + """Initialize BSBLan entity.""" + super().__init__(coordinator, data) + host = self.coordinator.config_entry.data["host"] + mac = self.coordinator.config_entry.data["mac"] self._attr_device_info = DeviceInfo( - connections={(CONNECTION_NETWORK_MAC, format_mac(device.MAC))}, - identifiers={(DOMAIN, format_mac(device.MAC))}, + identifiers={(DOMAIN, data.device.MAC)}, + connections={(CONNECTION_NETWORK_MAC, format_mac(mac))}, + name=data.device.name, manufacturer="BSBLAN Inc.", - model=info.device_identification.value, - name=device.name, - sw_version=f"{device.version})", - configuration_url=f"http://{entry.data[CONF_HOST]}", + model=data.info.device_identification.value, + sw_version=data.device.version, + configuration_url=f"http://{host}", ) diff --git a/homeassistant/components/bsblan/manifest.json b/homeassistant/components/bsblan/manifest.json index 3f58fbe364c..6cd8608c42d 100644 --- a/homeassistant/components/bsblan/manifest.json +++ b/homeassistant/components/bsblan/manifest.json @@ -7,5 +7,5 @@ "integration_type": "device", "iot_class": "local_polling", "loggers": ["bsblan"], - "requirements": ["python-bsblan==0.5.18"] + "requirements": ["python-bsblan==0.6.2"] } diff --git a/homeassistant/components/bt_smarthub/device_tracker.py b/homeassistant/components/bt_smarthub/device_tracker.py index 10c8000fb93..4b52f38ff31 100644 --- a/homeassistant/components/bt_smarthub/device_tracker.py +++ b/homeassistant/components/bt_smarthub/device_tracker.py @@ -51,7 +51,7 @@ def _create_device(data): return _Device(ip_address, mac, host, status, name) -_Device = namedtuple("_Device", ["ip_address", "mac", "host", "status", "name"]) +_Device = 
namedtuple("_Device", ["ip_address", "mac", "host", "status", "name"]) # noqa: PYI024 class BTSmartHubScanner(DeviceScanner): diff --git a/homeassistant/components/bthome/sensor.py b/homeassistant/components/bthome/sensor.py index 656addad620..64e6d61cefb 100644 --- a/homeassistant/components/bthome/sensor.py +++ b/homeassistant/components/bthome/sensor.py @@ -27,6 +27,7 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfConductivity, UnitOfElectricCurrent, UnitOfElectricPotential, UnitOfEnergy, @@ -356,6 +357,16 @@ SENSOR_DESCRIPTIONS = { native_unit_of_measurement=UnitOfVolume.LITERS, state_class=SensorStateClass.TOTAL, ), + # Conductivity (µS/cm) + ( + BTHomeSensorDeviceClass.CONDUCTIVITY, + Units.CONDUCTIVITY, + ): SensorEntityDescription( + key=f"{BTHomeSensorDeviceClass.CONDUCTIVITY}_{Units.CONDUCTIVITY}", + device_class=SensorDeviceClass.CONDUCTIVITY, + native_unit_of_measurement=UnitOfConductivity.MICROSIEMENS, + state_class=SensorStateClass.MEASUREMENT, + ), } diff --git a/homeassistant/components/buienradar/const.py b/homeassistant/components/buienradar/const.py index c82970ed318..fd92afd59b0 100644 --- a/homeassistant/components/buienradar/const.py +++ b/homeassistant/components/buienradar/const.py @@ -2,6 +2,7 @@ DOMAIN = "buienradar" +DEFAULT_TIMEOUT = 60 DEFAULT_TIMEFRAME = 60 DEFAULT_DIMENSION = 700 diff --git a/homeassistant/components/buienradar/util.py b/homeassistant/components/buienradar/util.py index b641644cebe..f089fce89b7 100644 --- a/homeassistant/components/buienradar/util.py +++ b/homeassistant/components/buienradar/util.py @@ -1,9 +1,9 @@ """Shared utilities for different supported platforms.""" -from asyncio import timeout from datetime import datetime, timedelta from http import HTTPStatus import logging +from typing import Any import aiohttp from buienradar.buienradar import parse_data @@ -27,12 +27,12 @@ from buienradar.constants import ( from buienradar.urls import JSON_FEED_URL, json_precipitation_forecast_url from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE -from homeassistant.core import CALLBACK_TYPE, callback +from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.event import async_track_point_in_utc_time from homeassistant.util import dt as dt_util -from .const import SCHEDULE_NOK, SCHEDULE_OK +from .const import DEFAULT_TIMEOUT, SCHEDULE_NOK, SCHEDULE_OK __all__ = ["BrData"] _LOGGER = logging.getLogger(__name__) @@ -59,10 +59,10 @@ class BrData: load_error_count: int = WARN_THRESHOLD rain_error_count: int = WARN_THRESHOLD - def __init__(self, hass, coordinates, timeframe, devices): + def __init__(self, hass: HomeAssistant, coordinates, timeframe, devices) -> None: """Initialize the data object.""" self.devices = devices - self.data = {} + self.data: dict[str, Any] | None = {} self.hass = hass self.coordinates = coordinates self.timeframe = timeframe @@ -93,9 +93,9 @@ class BrData: resp = None try: websession = async_get_clientsession(self.hass) - async with timeout(10): - resp = await websession.get(url) - + async with websession.get( + url, timeout=aiohttp.ClientTimeout(total=DEFAULT_TIMEOUT) + ) as resp: result[STATUS_CODE] = resp.status result[CONTENT] = await resp.text() if resp.status == HTTPStatus.OK: diff --git a/homeassistant/components/buienradar/weather.py b/homeassistant/components/buienradar/weather.py index 02e1f444c9c..2af66982fab 100644 --- 
a/homeassistant/components/buienradar/weather.py +++ b/homeassistant/components/buienradar/weather.py @@ -130,7 +130,7 @@ class BrWeather(WeatherEntity): _attr_should_poll = False _attr_supported_features = WeatherEntityFeature.FORECAST_DAILY - def __init__(self, config, coordinates): + def __init__(self, config, coordinates) -> None: """Initialize the platform with a data instance and station name.""" self._stationname = config.get(CONF_NAME, "Buienradar") self._attr_name = self._stationname or f"BR {'(unknown station)'}" diff --git a/homeassistant/components/button/__init__.py b/homeassistant/components/button/__init__.py index 323f9eddd77..3955fabdf00 100644 --- a/homeassistant/components/button/__init__.py +++ b/homeassistant/components/button/__init__.py @@ -54,7 +54,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_PRESS, - {}, + None, "_async_press_action", ) diff --git a/homeassistant/components/button/icons.json b/homeassistant/components/button/icons.json index 71956124d7f..1364fb2d056 100644 --- a/homeassistant/components/button/icons.json +++ b/homeassistant/components/button/icons.json @@ -14,6 +14,8 @@ } }, "services": { - "press": "mdi:gesture-tap-button" + "press": { + "service": "mdi:gesture-tap-button" + } } } diff --git a/homeassistant/components/calendar/icons.json b/homeassistant/components/calendar/icons.json index e4e526fe75c..9b8df3ec6d3 100644 --- a/homeassistant/components/calendar/icons.json +++ b/homeassistant/components/calendar/icons.json @@ -9,8 +9,14 @@ } }, "services": { - "create_event": "mdi:calendar-plus", - "get_events": "mdi:calendar-month", - "list_events": "mdi:calendar-month" + "create_event": { + "service": "mdi:calendar-plus" + }, + "get_events": { + "service": "mdi:calendar-month" + }, + "list_events": { + "service": "mdi:calendar-month" + } } } diff --git a/homeassistant/components/camera/__init__.py b/homeassistant/components/camera/__init__.py index d8fa4bfbc7a..859ced1ba86 100644 --- a/homeassistant/components/camera/__init__.py +++ b/homeassistant/components/camera/__init__.py @@ -431,13 +431,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, unsub_track_time_interval) component.async_register_entity_service( - SERVICE_ENABLE_MOTION, {}, "async_enable_motion_detection" + SERVICE_ENABLE_MOTION, None, "async_enable_motion_detection" ) component.async_register_entity_service( - SERVICE_DISABLE_MOTION, {}, "async_disable_motion_detection" + SERVICE_DISABLE_MOTION, None, "async_disable_motion_detection" ) - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") component.async_register_entity_service( SERVICE_SNAPSHOT, CAMERA_SERVICE_SNAPSHOT, async_handle_snapshot_service ) @@ -862,7 +862,7 @@ class CameraMjpegStream(CameraView): # Compose camera stream from stills interval = float(interval_str) if interval < MIN_STREAM_INTERVAL: - raise ValueError(f"Stream interval must be > {MIN_STREAM_INTERVAL}") + raise ValueError(f"Stream interval must be > {MIN_STREAM_INTERVAL}") # noqa: TRY301 return await camera.handle_async_still_stream(request, interval) except ValueError as err: raise web.HTTPBadRequest from err @@ -992,7 +992,6 @@ 
async def async_handle_snapshot_service( """Handle snapshot services calls.""" hass = camera.hass filename: Template = service_call.data[ATTR_FILENAME] - filename.hass = hass snapshot_file = filename.async_render(variables={ATTR_ENTITY_ID: camera}) @@ -1069,9 +1068,7 @@ async def async_handle_record_service( if not stream: raise HomeAssistantError(f"{camera.entity_id} does not support record service") - hass = camera.hass filename = service_call.data[CONF_FILENAME] - filename.hass = hass video_path = filename.async_render(variables={ATTR_ENTITY_ID: camera}) await stream.async_record( diff --git a/homeassistant/components/camera/icons.json b/homeassistant/components/camera/icons.json index 37e71c80a67..982074cd553 100644 --- a/homeassistant/components/camera/icons.json +++ b/homeassistant/components/camera/icons.json @@ -8,12 +8,26 @@ } }, "services": { - "disable_motion_detection": "mdi:motion-sensor-off", - "enable_motion_detection": "mdi:motion-sensor", - "play_stream": "mdi:play", - "record": "mdi:record-rec", - "snapshot": "mdi:camera", - "turn_off": "mdi:video-off", - "turn_on": "mdi:video" + "disable_motion_detection": { + "service": "mdi:motion-sensor-off" + }, + "enable_motion_detection": { + "service": "mdi:motion-sensor" + }, + "play_stream": { + "service": "mdi:play" + }, + "record": { + "service": "mdi:record-rec" + }, + "snapshot": { + "service": "mdi:camera" + }, + "turn_off": { + "service": "mdi:video-off" + }, + "turn_on": { + "service": "mdi:video" + } } } diff --git a/homeassistant/components/camera/manifest.json b/homeassistant/components/camera/manifest.json index b1df158a260..9c56d97f910 100644 --- a/homeassistant/components/camera/manifest.json +++ b/homeassistant/components/camera/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/camera", "integration_type": "entity", "quality_scale": "internal", - "requirements": ["PyTurboJPEG==1.7.1"] + "requirements": ["PyTurboJPEG==1.7.5"] } diff --git a/homeassistant/components/canary/config_flow.py b/homeassistant/components/canary/config_flow.py index 6ae7632a7e2..5af7142af8f 100644 --- a/homeassistant/components/canary/config_flow.py +++ b/homeassistant/components/canary/config_flow.py @@ -54,11 +54,9 @@ class CanaryConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return CanaryOptionsFlowHandler(config_entry) - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initiated by configuration file.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/cast/__init__.py b/homeassistant/components/cast/__init__.py index b41dc9ddb41..e72eb196b61 100644 --- a/homeassistant/components/cast/__init__.py +++ b/homeassistant/components/cast/__init__.py @@ -11,7 +11,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import device_registry as dr from homeassistant.helpers.integration_platform import ( async_process_integration_platforms, ) @@ -19,7 +19,6 @@ from 
homeassistant.helpers.integration_platform import ( from . import home_assistant_cast from .const import DOMAIN -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [Platform.MEDIA_PLAYER] diff --git a/homeassistant/components/cast/config_flow.py b/homeassistant/components/cast/config_flow.py index 6ccd7be19c3..22351f5d2f7 100644 --- a/homeassistant/components/cast/config_flow.py +++ b/homeassistant/components/cast/config_flow.py @@ -29,11 +29,11 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize flow.""" - self._ignore_cec = set() - self._known_hosts = set() - self._wanted_uuid = set() + self._ignore_cec = set[str]() + self._known_hosts = set[str]() + self._wanted_uuid = set[str]() @staticmethod @callback @@ -43,7 +43,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return CastOptionsFlowHandler(config_entry) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/cast/icons.json b/homeassistant/components/cast/icons.json index e19ea0b07b2..a43411eaad3 100644 --- a/homeassistant/components/cast/icons.json +++ b/homeassistant/components/cast/icons.json @@ -1,5 +1,7 @@ { "services": { - "show_lovelace_view": "mdi:view-dashboard" + "show_lovelace_view": { + "service": "mdi:view-dashboard" + } } } diff --git a/homeassistant/components/cert_expiry/config_flow.py b/homeassistant/components/cert_expiry/config_flow.py index 8f937ef61ea..22d443c700d 100644 --- a/homeassistant/components/cert_expiry/config_flow.py +++ b/homeassistant/components/cert_expiry/config_flow.py @@ -95,12 +95,9 @@ class CertexpiryConfigFlow(ConfigFlow, domain=DOMAIN): errors=self._errors, ) - async def async_step_import( - self, - user_input: Mapping[str, Any] | None = None, - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry. 
Only host was required in the yaml file all other fields are optional """ - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) diff --git a/homeassistant/components/chacon_dio/__init__.py b/homeassistant/components/chacon_dio/__init__.py index 00558572fca..94617cb3929 100644 --- a/homeassistant/components/chacon_dio/__init__.py +++ b/homeassistant/components/chacon_dio/__init__.py @@ -17,7 +17,7 @@ from homeassistant.core import Event, HomeAssistant _LOGGER = logging.getLogger(__name__) -PLATFORMS: list[Platform] = [Platform.COVER] +PLATFORMS: list[Platform] = [Platform.COVER, Platform.SWITCH] @dataclass diff --git a/homeassistant/components/chacon_dio/entity.py b/homeassistant/components/chacon_dio/entity.py index 38f3d7f5831..7cec6810897 100644 --- a/homeassistant/components/chacon_dio/entity.py +++ b/homeassistant/components/chacon_dio/entity.py @@ -51,3 +51,11 @@ class ChaconDioEntity(Entity): _LOGGER.debug("Data received from server %s", data) self._update_attr(data) self.async_write_ha_state() + + async def async_update(self) -> None: + """Update the state when the entity is requested to.""" + + _LOGGER.debug("Update called for %s, %s", self, self.target_id) + data = await self.client.get_status_details([self.target_id]) + _LOGGER.debug("Received data from server %s", data) + self._update_attr(data[self.target_id]) diff --git a/homeassistant/components/chacon_dio/manifest.json b/homeassistant/components/chacon_dio/manifest.json index c0f4059e798..edee24444f7 100644 --- a/homeassistant/components/chacon_dio/manifest.json +++ b/homeassistant/components/chacon_dio/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/chacon_dio", "iot_class": "cloud_push", "loggers": ["dio_chacon_api"], - "requirements": ["dio-chacon-wifi-api==1.2.0"] + "requirements": ["dio-chacon-wifi-api==1.2.1"] } diff --git a/homeassistant/components/chacon_dio/switch.py b/homeassistant/components/chacon_dio/switch.py new file mode 100644 index 00000000000..be178c3c3b5 --- /dev/null +++ b/homeassistant/components/chacon_dio/switch.py @@ -0,0 +1,74 @@ +"""Switch Platform for Chacon Dio REV-LIGHT and switch plug devices.""" + +import logging +from typing import Any + +from dio_chacon_wifi_api.const import DeviceTypeEnum + +from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import ChaconDioConfigEntry +from .entity import ChaconDioEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ChaconDioConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Chacon Dio switch devices.""" + data = config_entry.runtime_data + client = data.client + + async_add_entities( + ChaconDioSwitch(client, device) + for device in data.list_devices + if device["type"] + in (DeviceTypeEnum.SWITCH_LIGHT.value, DeviceTypeEnum.SWITCH_PLUG.value) + ) + + +class ChaconDioSwitch(ChaconDioEntity, SwitchEntity): + """Object for controlling a Chacon Dio switch.""" + + _attr_device_class = SwitchDeviceClass.SWITCH + _attr_name = None + + def _update_attr(self, data: dict[str, Any]) -> None: + """Recomputes the attributes values either at init or when the device state changes.""" + self._attr_available = data["connected"] + self._attr_is_on = data["is_on"] + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn on the switch. + + Turned on status is effective after the server callback that triggers callback_device_state. + """ + + _LOGGER.debug( + "Turn on the switch %s , %s, %s", + self.target_id, + self.entity_id, + self._attr_is_on, + ) + + await self.client.switch_switch(self.target_id, True) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn off the switch. + + Turned on status is effective after the server callback that triggers callback_device_state. + """ + + _LOGGER.debug( + "Turn off the switch %s , %s, %s", + self.target_id, + self.entity_id, + self._attr_is_on, + ) + + await self.client.switch_switch(self.target_id, False) diff --git a/homeassistant/components/channels/icons.json b/homeassistant/components/channels/icons.json index cbbda1ef623..ad5504a5422 100644 --- a/homeassistant/components/channels/icons.json +++ b/homeassistant/components/channels/icons.json @@ -1,7 +1,13 @@ { "services": { - "seek_forward": "mdi:skip-forward", - "seek_backward": "mdi:skip-backward", - "seek_by": "mdi:timer-check-outline" + "seek_forward": { + "service": "mdi:skip-forward" + }, + "seek_backward": { + "service": "mdi:skip-backward" + }, + "seek_by": { + "service": "mdi:timer-check-outline" + } } } diff --git a/homeassistant/components/channels/media_player.py b/homeassistant/components/channels/media_player.py index 07ed8ce7d66..f6de35a4156 100644 --- a/homeassistant/components/channels/media_player.py +++ b/homeassistant/components/channels/media_player.py @@ -49,12 +49,12 @@ async def async_setup_platform( platform.async_register_entity_service( SERVICE_SEEK_FORWARD, - {}, + None, "seek_forward", ) platform.async_register_entity_service( SERVICE_SEEK_BACKWARD, - {}, + None, "seek_backward", ) platform.async_register_entity_service( diff --git a/homeassistant/components/climate/__init__.py b/homeassistant/components/climate/__init__.py index f546ae0e671..6097e4f1346 100644 --- a/homeassistant/components/climate/__init__.py +++ b/homeassistant/components/climate/__init__.py @@ -156,19 +156,19 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_TURN_ON, - {}, + None, "async_turn_on", [ClimateEntityFeature.TURN_ON], ) component.async_register_entity_service( SERVICE_TURN_OFF, - {}, + None, "async_turn_off", [ClimateEntityFeature.TURN_OFF], ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [ClimateEntityFeature.TURN_OFF, ClimateEntityFeature.TURN_ON], ) diff --git 
a/homeassistant/components/climate/icons.json b/homeassistant/components/climate/icons.json index ea6c504ce25..c9a8d12d01b 100644 --- a/homeassistant/components/climate/icons.json +++ b/homeassistant/components/climate/icons.json @@ -56,15 +56,35 @@ } }, "services": { - "set_fan_mode": "mdi:fan", - "set_humidity": "mdi:water-percent", - "set_swing_mode": "mdi:arrow-oscillating", - "set_temperature": "mdi:thermometer", - "set_aux_heat": "mdi:radiator", - "set_preset_mode": "mdi:sofa", - "set_hvac_mode": "mdi:hvac", - "turn_on": "mdi:power-on", - "turn_off": "mdi:power-off", - "toggle": "mdi:toggle-switch" + "set_fan_mode": { + "service": "mdi:fan" + }, + "set_humidity": { + "service": "mdi:water-percent" + }, + "set_swing_mode": { + "service": "mdi:arrow-oscillating" + }, + "set_temperature": { + "service": "mdi:thermometer" + }, + "set_aux_heat": { + "service": "mdi:radiator" + }, + "set_preset_mode": { + "service": "mdi:sofa" + }, + "set_hvac_mode": { + "service": "mdi:hvac" + }, + "turn_on": { + "service": "mdi:power-on" + }, + "turn_off": { + "service": "mdi:power-off" + }, + "toggle": { + "service": "mdi:toggle-switch" + } } } diff --git a/homeassistant/components/cloud/icons.json b/homeassistant/components/cloud/icons.json index 06ee7eb2f19..32888fa75c7 100644 --- a/homeassistant/components/cloud/icons.json +++ b/homeassistant/components/cloud/icons.json @@ -1,6 +1,10 @@ { "services": { - "remote_connect": "mdi:cloud", - "remote_disconnect": "mdi:cloud-off" + "remote_connect": { + "service": "mdi:cloud" + }, + "remote_disconnect": { + "service": "mdi:cloud-off" + } } } diff --git a/homeassistant/components/cloud/tts.py b/homeassistant/components/cloud/tts.py index 8cf18c08314..4dbee10fbaf 100644 --- a/homeassistant/components/cloud/tts.py +++ b/homeassistant/components/cloud/tts.py @@ -221,7 +221,7 @@ class CloudProvider(Provider): def __init__(self, cloud: Cloud[CloudClient]) -> None: """Initialize cloud provider.""" self.cloud = cloud - self.name = "Cloud" + self.name = "Home Assistant Cloud" self._language, self._voice = cloud.client.prefs.tts_default_voice cloud.client.prefs.async_listen_updates(self._sync_prefs) diff --git a/homeassistant/components/cloudflare/__init__.py b/homeassistant/components/cloudflare/__init__.py index 5934e43f8a2..bd27be71d18 100644 --- a/homeassistant/components/cloudflare/__init__.py +++ b/homeassistant/components/cloudflare/__init__.py @@ -18,7 +18,6 @@ from homeassistant.exceptions import ( HomeAssistantError, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.event import async_track_time_interval from homeassistant.util.location import async_detect_location_info from homeassistant.util.network import is_ipv4_address @@ -27,8 +26,6 @@ from .const import CONF_RECORDS, DEFAULT_UPDATE_INTERVAL, DOMAIN, SERVICE_UPDATE _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Cloudflare from a config entry.""" diff --git a/homeassistant/components/cloudflare/icons.json b/homeassistant/components/cloudflare/icons.json index 6bf6d773fc3..2d452716c94 100644 --- a/homeassistant/components/cloudflare/icons.json +++ b/homeassistant/components/cloudflare/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_records": "mdi:dns" + "update_records": { + "service": "mdi:dns" + } } } diff --git 
a/homeassistant/components/coinbase/__init__.py b/homeassistant/components/coinbase/__init__.py index 0181c12a2e7..f5fd8fa1dc3 100644 --- a/homeassistant/components/coinbase/__init__.py +++ b/homeassistant/components/coinbase/__init__.py @@ -13,7 +13,7 @@ from coinbase.wallet.error import AuthenticationError from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers import entity_registry as er from homeassistant.util import Throttle from .const import ( @@ -46,9 +46,6 @@ PLATFORMS = [Platform.SENSOR] MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Coinbase from a config entry.""" diff --git a/homeassistant/components/color_extractor/icons.json b/homeassistant/components/color_extractor/icons.json index 07b449ffc54..9dab17a9f3b 100644 --- a/homeassistant/components/color_extractor/icons.json +++ b/homeassistant/components/color_extractor/icons.json @@ -1,5 +1,7 @@ { "services": { - "turn_on": "mdi:lightbulb-on" + "turn_on": { + "service": "mdi:lightbulb-on" + } } } diff --git a/homeassistant/components/command_line/binary_sensor.py b/homeassistant/components/command_line/binary_sensor.py index 2ff17e86efd..f5d9ad9d63d 100644 --- a/homeassistant/components/command_line/binary_sensor.py +++ b/homeassistant/components/command_line/binary_sensor.py @@ -40,6 +40,8 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Command line Binary Sensor.""" + if not discovery_info: + return discovery_info = cast(DiscoveryInfoType, discovery_info) binary_sensor_config = discovery_info @@ -51,9 +53,7 @@ async def async_setup_platform( scan_interval: timedelta = binary_sensor_config.get( CONF_SCAN_INTERVAL, SCAN_INTERVAL ) - - if value_template := binary_sensor_config.get(CONF_VALUE_TEMPLATE): - value_template.hass = hass + value_template: Template | None = binary_sensor_config.get(CONF_VALUE_TEMPLATE) data = CommandSensorData(hass, command, command_timeout) diff --git a/homeassistant/components/command_line/cover.py b/homeassistant/components/command_line/cover.py index 2c6ec78b689..8ddfd399ba8 100644 --- a/homeassistant/components/command_line/cover.py +++ b/homeassistant/components/command_line/cover.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from datetime import datetime, timedelta -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from homeassistant.components.cover import CoverEntity from homeassistant.const import ( @@ -37,6 +37,8 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up cover controlled by shell commands.""" + if not discovery_info: + return covers = [] discovery_info = cast(DiscoveryInfoType, discovery_info) @@ -45,9 +47,6 @@ async def async_setup_platform( } for device_name, cover_config in entities.items(): - if value_template := cover_config.get(CONF_VALUE_TEMPLATE): - value_template.hass = hass - trigger_entity_config = { CONF_NAME: Template(cover_config.get(CONF_NAME, device_name), hass), **{k: v for k, v in cover_config.items() if k in TRIGGER_ENTITY_OPTIONS}, @@ -60,7 +59,7 @@ async def async_setup_platform( cover_config[CONF_COMMAND_CLOSE], 
cover_config[CONF_COMMAND_STOP], cover_config.get(CONF_COMMAND_STATE), - value_template, + cover_config.get(CONF_VALUE_TEMPLATE), cover_config[CONF_COMMAND_TIMEOUT], cover_config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL), ) @@ -113,7 +112,7 @@ class CommandCover(ManualTriggerEntity, CoverEntity): async def _async_move_cover(self, command: str) -> bool: """Execute the actual commands.""" - LOGGER.info("Running command: %s", command) + LOGGER.debug("Running command: %s", command) returncode = await async_call_shell_with_timeout(command, self._timeout) success = returncode == 0 @@ -142,10 +141,10 @@ class CommandCover(ManualTriggerEntity, CoverEntity): async def _async_query_state(self) -> str | None: """Query for the state.""" - if self._command_state: - LOGGER.info("Running state value command: %s", self._command_state) - return await async_check_output_or_log(self._command_state, self._timeout) - return None + if TYPE_CHECKING: + assert self._command_state + LOGGER.debug("Running state value command: %s", self._command_state) + return await async_check_output_or_log(self._command_state, self._timeout) async def _update_entity_state(self, now: datetime | None = None) -> None: """Update the state of the entity.""" diff --git a/homeassistant/components/command_line/icons.json b/homeassistant/components/command_line/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/command_line/icons.json +++ b/homeassistant/components/command_line/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/command_line/notify.py b/homeassistant/components/command_line/notify.py index 14245b72288..4f5a4e4b499 100644 --- a/homeassistant/components/command_line/notify.py +++ b/homeassistant/components/command_line/notify.py @@ -21,8 +21,10 @@ def get_service( hass: HomeAssistant, config: ConfigType, discovery_info: DiscoveryInfoType | None = None, -) -> CommandLineNotificationService: +) -> CommandLineNotificationService | None: """Get the Command Line notification service.""" + if not discovery_info: + return None discovery_info = cast(DiscoveryInfoType, discovery_info) notify_config = discovery_info diff --git a/homeassistant/components/command_line/sensor.py b/homeassistant/components/command_line/sensor.py index 14edbb55ed0..7c31af165f9 100644 --- a/homeassistant/components/command_line/sensor.py +++ b/homeassistant/components/command_line/sensor.py @@ -48,6 +48,8 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Command Sensor.""" + if not discovery_info: + return discovery_info = cast(DiscoveryInfoType, discovery_info) sensor_config = discovery_info @@ -57,11 +59,9 @@ async def async_setup_platform( json_attributes: list[str] | None = sensor_config.get(CONF_JSON_ATTRIBUTES) json_attributes_path: str | None = sensor_config.get(CONF_JSON_ATTRIBUTES_PATH) scan_interval: timedelta = sensor_config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL) + value_template: Template | None = sensor_config.get(CONF_VALUE_TEMPLATE) data = CommandSensorData(hass, command, command_timeout) - if value_template := sensor_config.get(CONF_VALUE_TEMPLATE): - value_template.hass = hass - trigger_entity_config = { CONF_NAME: Template(sensor_config[CONF_NAME], hass), **{k: v for k, v in sensor_config.items() if k in TRIGGER_ENTITY_OPTIONS}, diff --git a/homeassistant/components/command_line/switch.py b/homeassistant/components/command_line/switch.py index 
f8e9d21cf23..e42c2226cf2 100644 --- a/homeassistant/components/command_line/switch.py +++ b/homeassistant/components/command_line/switch.py @@ -4,7 +4,7 @@ from __future__ import annotations import asyncio from datetime import datetime, timedelta -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast from homeassistant.components.switch import ENTITY_ID_FORMAT, SwitchEntity from homeassistant.const import ( @@ -36,6 +36,8 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Find and return switches controlled by shell commands.""" + if not discovery_info: + return switches = [] discovery_info = cast(DiscoveryInfoType, discovery_info) @@ -44,9 +46,6 @@ async def async_setup_platform( } for object_id, switch_config in entities.items(): - if value_template := switch_config.get(CONF_VALUE_TEMPLATE): - value_template.hass = hass - trigger_entity_config = { CONF_NAME: Template(switch_config.get(CONF_NAME, object_id), hass), **{k: v for k, v in switch_config.items() if k in TRIGGER_ENTITY_OPTIONS}, @@ -59,7 +58,7 @@ async def async_setup_platform( switch_config[CONF_COMMAND_ON], switch_config[CONF_COMMAND_OFF], switch_config.get(CONF_COMMAND_STATE), - value_template, + switch_config.get(CONF_VALUE_TEMPLATE), switch_config[CONF_COMMAND_TIMEOUT], switch_config.get(CONF_SCAN_INTERVAL, SCAN_INTERVAL), ) @@ -112,7 +111,7 @@ class CommandSwitch(ManualTriggerEntity, SwitchEntity): async def _switch(self, command: str) -> bool: """Execute the actual commands.""" - LOGGER.info("Running command: %s", command) + LOGGER.debug("Running command: %s", command) success = await async_call_shell_with_timeout(command, self._timeout) == 0 @@ -123,12 +122,12 @@ class CommandSwitch(ManualTriggerEntity, SwitchEntity): async def _async_query_state_value(self, command: str) -> str | None: """Execute state command for return value.""" - LOGGER.info("Running state value command: %s", command) + LOGGER.debug("Running state value command: %s", command) return await async_check_output_or_log(command, self._timeout) async def _async_query_state_code(self, command: str) -> bool: """Execute state command for return code.""" - LOGGER.info("Running state code command: %s", command) + LOGGER.debug("Running state code command: %s", command) return ( await async_call_shell_with_timeout( command, self._timeout, log_return_code=False @@ -143,11 +142,11 @@ class CommandSwitch(ManualTriggerEntity, SwitchEntity): async def _async_query_state(self) -> str | int | None: """Query for state.""" - if self._command_state: - if self._value_template: - return await self._async_query_state_value(self._command_state) - return await self._async_query_state_code(self._command_state) - return None + if TYPE_CHECKING: + assert self._command_state + if self._value_template: + return await self._async_query_state_value(self._command_state) + return await self._async_query_state_code(self._command_state) async def _update_entity_state(self, now: datetime | None = None) -> None: """Update the state of the entity.""" diff --git a/homeassistant/components/control4/__init__.py b/homeassistant/components/control4/__init__.py index c9a6eab5c62..a3d0cebd1fc 100644 --- a/homeassistant/components/control4/__init__.py +++ b/homeassistant/components/control4/__init__.py @@ -50,7 +50,8 @@ PLATFORMS = [Platform.LIGHT, Platform.MEDIA_PLAYER] async def call_c4_api_retry(func, *func_args): """Call C4 API function and retry on failure.""" - for i in range(API_RETRY_TIMES): + # Ruff doesn't understand this 
loop - the exception is always raised after the retries + for i in range(API_RETRY_TIMES): # noqa: RET503 try: return await func(*func_args) except client_exceptions.ClientError as exception: diff --git a/homeassistant/components/control4/config_flow.py b/homeassistant/components/control4/config_flow.py index f6d746c9cb4..aa7839b4383 100644 --- a/homeassistant/components/control4/config_flow.py +++ b/homeassistant/components/control4/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from aiohttp.client_exceptions import ClientError from pyControl4.account import C4Account @@ -10,7 +11,12 @@ from pyControl4.director import C4Director from pyControl4.error_handling import NotFound, Unauthorized import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -93,7 +99,9 @@ class Control4ConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: @@ -105,9 +113,9 @@ class Control4ConfigFlow(ConfigFlow, domain=DOMAIN): ) try: if not await hub.authenticate(): - raise InvalidAuth + raise InvalidAuth # noqa: TRY301 if not await hub.connect_to_director(): - raise CannotConnect + raise CannotConnect # noqa: TRY301 except InvalidAuth: errors["base"] = "invalid_auth" except CannotConnect: diff --git a/homeassistant/components/control4/manifest.json b/homeassistant/components/control4/manifest.json index 765f0dce78c..3088ebf8672 100644 --- a/homeassistant/components/control4/manifest.json +++ b/homeassistant/components/control4/manifest.json @@ -6,7 +6,7 @@ "documentation": "https://www.home-assistant.io/integrations/control4", "iot_class": "local_polling", "loggers": ["pyControl4"], - "requirements": ["pyControl4==1.1.0"], + "requirements": ["pyControl4==1.2.0"], "ssdp": [ { "st": "c4:director" diff --git a/homeassistant/components/conversation/default_agent.py b/homeassistant/components/conversation/default_agent.py index 1661d2ad30d..05b4d194d33 100644 --- a/homeassistant/components/conversation/default_agent.py +++ b/homeassistant/components/conversation/default_agent.py @@ -349,6 +349,9 @@ class DefaultAgent(ConversationEntity): } for entity in result.entities_list } + device_area = self._get_device_area(user_input.device_id) + if device_area: + slots["preferred_area_id"] = {"value": device_area.id} async_conversation_trace_append( ConversationTraceEventType.TOOL_CALL, { @@ -917,18 +920,26 @@ class DefaultAgent(ConversationEntity): if not user_input.device_id: return None - devices = dr.async_get(self.hass) - device = devices.async_get(user_input.device_id) - if (device is None) or (device.area_id is None): - return None - - areas = ar.async_get(self.hass) - device_area = areas.async_get_area(device.area_id) + device_area = self._get_device_area(user_input.device_id) if device_area is None: return None return {"area": {"value": device_area.name, "text": device_area.name}} + def _get_device_area(self, device_id: str | None) -> ar.AreaEntry | None: + """Return area object for given device identifier.""" + if device_id is None: + return None + + devices = dr.async_get(self.hass) + device = devices.async_get(device_id) + if 
(device is None) or (device.area_id is None): + return None + + areas = ar.async_get(self.hass) + + return areas.async_get_area(device.area_id) + def _get_error_text( self, error_key: ErrorKey | str, diff --git a/homeassistant/components/conversation/icons.json b/homeassistant/components/conversation/icons.json index b39a1603b15..658783f9ae2 100644 --- a/homeassistant/components/conversation/icons.json +++ b/homeassistant/components/conversation/icons.json @@ -1,6 +1,10 @@ { "services": { - "process": "mdi:message-processing", - "reload": "mdi:reload" + "process": { + "service": "mdi:message-processing" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/conversation/manifest.json b/homeassistant/components/conversation/manifest.json index d7a308b8b2b..837ac9f9b1f 100644 --- a/homeassistant/components/conversation/manifest.json +++ b/homeassistant/components/conversation/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/conversation", "integration_type": "system", "quality_scale": "internal", - "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.8.7"] + "requirements": ["hassil==1.7.4", "home-assistant-intents==2024.9.4"] } diff --git a/homeassistant/components/conversation/models.py b/homeassistant/components/conversation/models.py index 902b52483e0..724e520e6df 100644 --- a/homeassistant/components/conversation/models.py +++ b/homeassistant/components/conversation/models.py @@ -23,11 +23,22 @@ class ConversationInput: """User input to be processed.""" text: str + """User spoken text.""" + context: Context + """Context of the request.""" + conversation_id: str | None + """Unique identifier for the conversation.""" + device_id: str | None + """Unique identifier for the device.""" + language: str + """Language of the request.""" + agent_id: str | None = None + """Agent to use for processing.""" @dataclass(slots=True) diff --git a/homeassistant/components/coolmaster/manifest.json b/homeassistant/components/coolmaster/manifest.json index 9488e068d44..8775d7f72b8 100644 --- a/homeassistant/components/coolmaster/manifest.json +++ b/homeassistant/components/coolmaster/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/coolmaster", "iot_class": "local_polling", "loggers": ["pycoolmasternet_async"], - "requirements": ["pycoolmasternet-async==0.1.5"] + "requirements": ["pycoolmasternet-async==0.2.2"] } diff --git a/homeassistant/components/counter/__init__.py b/homeassistant/components/counter/__init__.py index 324668a63e2..f0a14aa7951 100644 --- a/homeassistant/components/counter/__init__.py +++ b/homeassistant/components/counter/__init__.py @@ -122,9 +122,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: storage_collection, DOMAIN, DOMAIN, STORAGE_FIELDS, STORAGE_FIELDS ).async_setup(hass) - component.async_register_entity_service(SERVICE_INCREMENT, {}, "async_increment") - component.async_register_entity_service(SERVICE_DECREMENT, {}, "async_decrement") - component.async_register_entity_service(SERVICE_RESET, {}, "async_reset") + component.async_register_entity_service(SERVICE_INCREMENT, None, "async_increment") + component.async_register_entity_service(SERVICE_DECREMENT, None, "async_decrement") + component.async_register_entity_service(SERVICE_RESET, None, "async_reset") component.async_register_entity_service( SERVICE_SET_VALUE, {vol.Required(VALUE): cv.positive_int}, diff --git a/homeassistant/components/counter/icons.json 
b/homeassistant/components/counter/icons.json index 1e0ef54bbb7..59cd0bb7121 100644 --- a/homeassistant/components/counter/icons.json +++ b/homeassistant/components/counter/icons.json @@ -1,8 +1,16 @@ { "services": { - "decrement": "mdi:numeric-negative-1", - "increment": "mdi:numeric-positive-1", - "reset": "mdi:refresh", - "set_value": "mdi:counter" + "decrement": { + "service": "mdi:numeric-negative-1" + }, + "increment": { + "service": "mdi:numeric-positive-1" + }, + "reset": { + "service": "mdi:refresh" + }, + "set_value": { + "service": "mdi:counter" + } } } diff --git a/homeassistant/components/cover/__init__.py b/homeassistant/components/cover/__init__.py index 645bd88de7a..90d2b644810 100644 --- a/homeassistant/components/cover/__init__.py +++ b/homeassistant/components/cover/__init__.py @@ -158,11 +158,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) component.async_register_entity_service( - SERVICE_OPEN_COVER, {}, "async_open_cover", [CoverEntityFeature.OPEN] + SERVICE_OPEN_COVER, None, "async_open_cover", [CoverEntityFeature.OPEN] ) component.async_register_entity_service( - SERVICE_CLOSE_COVER, {}, "async_close_cover", [CoverEntityFeature.CLOSE] + SERVICE_CLOSE_COVER, None, "async_close_cover", [CoverEntityFeature.CLOSE] ) component.async_register_entity_service( @@ -177,33 +177,33 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) component.async_register_entity_service( - SERVICE_STOP_COVER, {}, "async_stop_cover", [CoverEntityFeature.STOP] + SERVICE_STOP_COVER, None, "async_stop_cover", [CoverEntityFeature.STOP] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE], ) component.async_register_entity_service( SERVICE_OPEN_COVER_TILT, - {}, + None, "async_open_cover_tilt", [CoverEntityFeature.OPEN_TILT], ) component.async_register_entity_service( SERVICE_CLOSE_COVER_TILT, - {}, + None, "async_close_cover_tilt", [CoverEntityFeature.CLOSE_TILT], ) component.async_register_entity_service( SERVICE_STOP_COVER_TILT, - {}, + None, "async_stop_cover_tilt", [CoverEntityFeature.STOP_TILT], ) @@ -221,7 +221,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_TOGGLE_COVER_TILT, - {}, + None, "async_toggle_tilt", [CoverEntityFeature.OPEN_TILT | CoverEntityFeature.CLOSE_TILT], ) diff --git a/homeassistant/components/cover/icons.json b/homeassistant/components/cover/icons.json index f2edaaa0893..91775fe634d 100644 --- a/homeassistant/components/cover/icons.json +++ b/homeassistant/components/cover/icons.json @@ -78,15 +78,35 @@ } }, "services": { - "close_cover": "mdi:arrow-down-box", - "close_cover_tilt": "mdi:arrow-bottom-left", - "open_cover": "mdi:arrow-up-box", - "open_cover_tilt": "mdi:arrow-top-right", - "set_cover_position": "mdi:arrow-down-box", - "set_cover_tilt_position": "mdi:arrow-top-right", - "stop_cover": "mdi:stop", - "stop_cover_tilt": "mdi:stop", - "toggle": "mdi:arrow-up-down", - "toggle_cover_tilt": "mdi:arrow-top-right-bottom-left" + "close_cover": { + "service": "mdi:arrow-down-box" + }, + "close_cover_tilt": { + "service": "mdi:arrow-bottom-left" + }, + "open_cover": { + "service": "mdi:arrow-up-box" + }, + "open_cover_tilt": { + "service": "mdi:arrow-top-right" + }, + "set_cover_position": { + "service": "mdi:arrow-down-box" + }, + "set_cover_tilt_position": { + "service": "mdi:arrow-top-right" + }, + "stop_cover": { 
+      "service": "mdi:stop"
+    },
+    "stop_cover_tilt": {
+      "service": "mdi:stop"
+    },
+    "toggle": {
+      "service": "mdi:arrow-up-down"
+    },
+    "toggle_cover_tilt": {
+      "service": "mdi:arrow-top-right-bottom-left"
+    }
   }
 }
diff --git a/homeassistant/components/currencylayer/sensor.py b/homeassistant/components/currencylayer/sensor.py
index 2ad0f88a2ab..01dec10efe0 100644
--- a/homeassistant/components/currencylayer/sensor.py
+++ b/homeassistant/components/currencylayer/sensor.py
@@ -108,7 +108,7 @@ class CurrencylayerData:
         try:
             result = requests.get(self._resource, params=self._parameters, timeout=10)
             if "error" in result.json():
-                raise ValueError(result.json()["error"]["info"])
+                raise ValueError(result.json()["error"]["info"])  # noqa: TRY301
             self.data = result.json()["quotes"]
             _LOGGER.debug("Currencylayer data updated: %s", result.json()["timestamp"])
         except ValueError as err:
diff --git a/homeassistant/components/daikin/__init__.py b/homeassistant/components/daikin/__init__.py
index 1bd833f354d..4da6bcee50b 100644
--- a/homeassistant/components/daikin/__init__.py
+++ b/homeassistant/components/daikin/__init__.py
@@ -23,7 +23,6 @@ from homeassistant.core import HomeAssistant, callback
 from homeassistant.exceptions import ConfigEntryNotReady
 from homeassistant.helpers import device_registry as dr, entity_registry as er
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
-import homeassistant.helpers.config_validation as cv
 from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo
 from homeassistant.util import Throttle
 
@@ -36,8 +35,6 @@ MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
 
 PLATFORMS = [Platform.CLIMATE, Platform.SENSOR, Platform.SWITCH]
 
-CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False)
-
 
 async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
     """Establish connection with Daikin."""
diff --git a/homeassistant/components/daikin/manifest.json b/homeassistant/components/daikin/manifest.json
index 0d93c0e25ad..88c29a20435 100644
--- a/homeassistant/components/daikin/manifest.json
+++ b/homeassistant/components/daikin/manifest.json
@@ -6,6 +6,6 @@
   "documentation": "https://www.home-assistant.io/integrations/daikin",
   "iot_class": "local_polling",
   "loggers": ["pydaikin"],
-  "requirements": ["pydaikin==2.13.4"],
+  "requirements": ["pydaikin==2.13.6"],
   "zeroconf": ["_dkapi._tcp.local."]
 }
diff --git a/homeassistant/components/date/icons.json b/homeassistant/components/date/icons.json
index 80ec2691285..b139b897210 100644
--- a/homeassistant/components/date/icons.json
+++ b/homeassistant/components/date/icons.json
@@ -5,6 +5,8 @@
     }
   },
   "services": {
-    "set_value": "mdi:calendar-edit"
+    "set_value": {
+      "service": "mdi:calendar-edit"
+    }
   }
 }
diff --git a/homeassistant/components/datetime/icons.json b/homeassistant/components/datetime/icons.json
index 563d03e2a8f..d7e9fca8e5c 100644
--- a/homeassistant/components/datetime/icons.json
+++ b/homeassistant/components/datetime/icons.json
@@ -5,6 +5,8 @@
     }
   },
   "services": {
-    "set_value": "mdi:calendar-edit"
+    "set_value": {
+      "service": "mdi:calendar-edit"
+    }
   }
 }
diff --git a/homeassistant/components/debugpy/icons.json b/homeassistant/components/debugpy/icons.json
index b3bb4dde23a..88086382059 100644
--- a/homeassistant/components/debugpy/icons.json
+++ b/homeassistant/components/debugpy/icons.json
@@ -1,5 +1,7 @@
 {
   "services": {
-    "start": "mdi:play"
+    "start": {
+      "service": "mdi:play"
+    }
   }
 }
diff --git a/homeassistant/components/deconz/icons.json
b/homeassistant/components/deconz/icons.json index 5b22daee53f..a7fb0859eec 100644 --- a/homeassistant/components/deconz/icons.json +++ b/homeassistant/components/deconz/icons.json @@ -1,7 +1,13 @@ { "services": { - "configure": "mdi:cog", - "device_refresh": "mdi:refresh", - "remove_orphaned_entries": "mdi:bookmark-remove" + "configure": { + "service": "mdi:cog" + }, + "device_refresh": { + "service": "mdi:refresh" + }, + "remove_orphaned_entries": { + "service": "mdi:bookmark-remove" + } } } diff --git a/homeassistant/components/deconz/select.py b/homeassistant/components/deconz/select.py index dad3ba9d78d..7f3f8cca060 100644 --- a/homeassistant/components/deconz/select.py +++ b/homeassistant/components/deconz/select.py @@ -3,6 +3,7 @@ from __future__ import annotations from pydeconz.models.event import EventType +from pydeconz.models.sensor.air_purifier import AirPurifier, AirPurifierFanMode from pydeconz.models.sensor.presence import ( Presence, PresenceConfigDeviceMode, @@ -36,6 +37,17 @@ async def async_setup_entry( hub = DeconzHub.get_hub(hass, config_entry) hub.entities[DOMAIN] = set() + @callback + def async_add_air_purifier_sensor(_: EventType, sensor_id: str) -> None: + """Add air purifier select entity from deCONZ.""" + sensor = hub.api.sensors.air_purifier[sensor_id] + async_add_entities([DeconzAirPurifierFanMode(sensor, hub)]) + + hub.register_platform_add_device_callback( + async_add_air_purifier_sensor, + hub.api.sensors.air_purifier, + ) + @callback def async_add_presence_sensor(_: EventType, sensor_id: str) -> None: """Add presence select entity from deCONZ.""" @@ -55,6 +67,39 @@ async def async_setup_entry( ) +class DeconzAirPurifierFanMode(DeconzDevice[AirPurifier], SelectEntity): + """Representation of a deCONZ air purifier fan mode entity.""" + + _name_suffix = "Fan Mode" + unique_id_suffix = "fan_mode" + _update_key = "mode" + + _attr_entity_category = EntityCategory.CONFIG + _attr_options = [ + AirPurifierFanMode.OFF.value, + AirPurifierFanMode.AUTO.value, + AirPurifierFanMode.SPEED_1.value, + AirPurifierFanMode.SPEED_2.value, + AirPurifierFanMode.SPEED_3.value, + AirPurifierFanMode.SPEED_4.value, + AirPurifierFanMode.SPEED_5.value, + ] + + TYPE = DOMAIN + + @property + def current_option(self) -> str: + """Return the selected entity option to represent the entity state.""" + return self._device.fan_mode.value + + async def async_select_option(self, option: str) -> None: + """Change the selected option.""" + await self.hub.api.sensors.air_purifier.set_config( + id=self._device.resource_id, + fan_mode=AirPurifierFanMode(option), + ) + + class DeconzPresenceDeviceModeSelect(DeconzDevice[Presence], SelectEntity): """Representation of a deCONZ presence device mode entity.""" diff --git a/homeassistant/components/demo/__init__.py b/homeassistant/components/demo/__init__.py index 371b783b653..d088dfb140b 100644 --- a/homeassistant/components/demo/__init__.py +++ b/homeassistant/components/demo/__init__.py @@ -55,7 +55,6 @@ COMPONENTS_WITH_CONFIG_ENTRY_DEMO_PLATFORM = [ COMPONENTS_WITH_DEMO_PLATFORM = [ Platform.TTS, - Platform.MAILBOX, Platform.IMAGE_PROCESSING, Platform.DEVICE_TRACKER, ] diff --git a/homeassistant/components/demo/alarm_control_panel.py b/homeassistant/components/demo/alarm_control_panel.py index f95042f2cc7..f9b791668e8 100644 --- a/homeassistant/components/demo/alarm_control_panel.py +++ b/homeassistant/components/demo/alarm_control_panel.py @@ -30,9 +30,10 @@ async def async_setup_entry( """Set up the Demo config entry.""" async_add_entities( [ - 
DemoAlarm( # type:ignore[no-untyped-call] + ManualAlarm( hass, "Security", + "demo_alarm_control_panel", "1234", None, True, @@ -74,9 +75,3 @@ async def async_setup_entry( ) ] ) - - -class DemoAlarm(ManualAlarm): - """Demo Alarm Control Panel.""" - - _attr_unique_id = "demo_alarm_control_panel" diff --git a/homeassistant/components/demo/config_flow.py b/homeassistant/components/demo/config_flow.py index 468d9cb042b..c866873732c 100644 --- a/homeassistant/components/demo/config_flow.py +++ b/homeassistant/components/demo/config_flow.py @@ -37,12 +37,12 @@ class DemoConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Demo", data=import_info) + return self.async_create_entry(title="Demo", data=import_data) class OptionsFlowHandler(OptionsFlow): diff --git a/homeassistant/components/demo/icons.json b/homeassistant/components/demo/icons.json index d9e1d405490..17425a6d119 100644 --- a/homeassistant/components/demo/icons.json +++ b/homeassistant/components/demo/icons.json @@ -75,6 +75,8 @@ } }, "services": { - "randomize_device_tracker_data": "mdi:dice-multiple" + "randomize_device_tracker_data": { + "service": "mdi:dice-multiple" + } } } diff --git a/homeassistant/components/demo/mailbox.py b/homeassistant/components/demo/mailbox.py deleted file mode 100644 index e0cdd05782d..00000000000 --- a/homeassistant/components/demo/mailbox.py +++ /dev/null @@ -1,95 +0,0 @@ -"""Support for a demo mailbox.""" - -from __future__ import annotations - -from hashlib import sha1 -import logging -import os -from typing import Any - -from homeassistant.components.mailbox import CONTENT_TYPE_MPEG, Mailbox, StreamError -from homeassistant.core import HomeAssistant -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util import dt as dt_util - -_LOGGER = logging.getLogger(__name__) - -MAILBOX_NAME = "DemoMailbox" - - -async def async_get_handler( - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, -) -> Mailbox: - """Set up the Demo mailbox.""" - return DemoMailbox(hass, MAILBOX_NAME) - - -class DemoMailbox(Mailbox): - """Demo Mailbox.""" - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize Demo mailbox.""" - super().__init__(hass, name) - self._messages: dict[str, dict[str, Any]] = {} - txt = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - for idx in range(10): - msgtime = int( - dt_util.as_timestamp(dt_util.utcnow()) - 3600 * 24 * (10 - idx) - ) - msgtxt = f"Message {idx + 1}. 
{txt * (1 + idx * (idx % 2))}" - msgsha = sha1(msgtxt.encode("utf-8")).hexdigest() - msg = { - "info": { - "origtime": msgtime, - "callerid": "John Doe <212-555-1212>", - "duration": "10", - }, - "text": msgtxt, - "sha": msgsha, - } - self._messages[msgsha] = msg - - @property - def media_type(self) -> str: - """Return the supported media type.""" - return CONTENT_TYPE_MPEG - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return True - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return True - - def _get_media(self) -> bytes: - """Return the media blob for the msgid.""" - audio_path = os.path.join(os.path.dirname(__file__), "tts.mp3") - with open(audio_path, "rb") as file: - return file.read() - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - if msgid not in self._messages: - raise StreamError("Message not found") - return await self.hass.async_add_executor_job(self._get_media) - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - return sorted( - self._messages.values(), - key=lambda item: item["info"]["origtime"], - reverse=True, - ) - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - if msgid in self._messages: - _LOGGER.info("Deleting: %s", msgid) - del self._messages[msgid] - self.async_update() - return True diff --git a/homeassistant/components/denonavr/icons.json b/homeassistant/components/denonavr/icons.json index ec6bc0854f9..33d7f1bd3d9 100644 --- a/homeassistant/components/denonavr/icons.json +++ b/homeassistant/components/denonavr/icons.json @@ -1,7 +1,13 @@ { "services": { - "get_command": "mdi:console", - "set_dynamic_eq": "mdi:tune", - "update_audyssey": "mdi:waveform" + "get_command": { + "service": "mdi:console" + }, + "set_dynamic_eq": { + "service": "mdi:tune" + }, + "update_audyssey": { + "service": "mdi:waveform" + } } } diff --git a/homeassistant/components/denonavr/media_player.py b/homeassistant/components/denonavr/media_player.py index 8d6df72a67e..a7d8565d6a4 100644 --- a/homeassistant/components/denonavr/media_player.py +++ b/homeassistant/components/denonavr/media_player.py @@ -152,7 +152,7 @@ async def async_setup_entry( ) platform.async_register_entity_service( SERVICE_UPDATE_AUDYSSEY, - {}, + None, f"async_{SERVICE_UPDATE_AUDYSSEY}", ) diff --git a/homeassistant/components/device_tracker/icons.json b/homeassistant/components/device_tracker/icons.json index c89053701ba..4e5b82576cf 100644 --- a/homeassistant/components/device_tracker/icons.json +++ b/homeassistant/components/device_tracker/icons.json @@ -8,6 +8,8 @@ } }, "services": { - "see": "mdi:account-eye" + "see": { + "service": "mdi:account-eye" + } } } diff --git a/homeassistant/components/device_tracker/legacy.py b/homeassistant/components/device_tracker/legacy.py index ac168c06fb1..15cb67f5ee8 100644 --- a/homeassistant/components/device_tracker/legacy.py +++ b/homeassistant/components/device_tracker/legacy.py @@ -350,7 +350,7 @@ class DeviceTrackerPlatform: discovery_info, ) else: - raise HomeAssistantError("Invalid legacy device_tracker platform.") + raise HomeAssistantError("Invalid legacy device_tracker platform.") # noqa: TRY301 if scanner is not None: async_setup_scanner_platform( diff --git a/homeassistant/components/devolo_home_network/config_flow.py b/homeassistant/components/devolo_home_network/config_flow.py index 63d86d46e8a..fca72471693 
100644 --- a/homeassistant/components/devolo_home_network/config_flow.py +++ b/homeassistant/components/devolo_home_network/config_flow.py @@ -112,10 +112,12 @@ class DevoloHomeNetworkConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={"host_name": title}, ) - async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle reauthentication.""" if entry := self.hass.config_entries.async_get_entry(self.context["entry_id"]): - self.context[CONF_HOST] = data[CONF_IP_ADDRESS] + self.context[CONF_HOST] = entry_data[CONF_IP_ADDRESS] self.context["title_placeholders"][PRODUCT] = ( entry.runtime_data.device.product ) diff --git a/homeassistant/components/dexcom/config_flow.py b/homeassistant/components/dexcom/config_flow.py index 19b35c2b03d..17bd1b3f7a8 100644 --- a/homeassistant/components/dexcom/config_flow.py +++ b/homeassistant/components/dexcom/config_flow.py @@ -2,10 +2,17 @@ from __future__ import annotations +from typing import Any + from pydexcom import AccountError, Dexcom, SessionError import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_PASSWORD, CONF_UNIT_OF_MEASUREMENT, CONF_USERNAME from homeassistant.core import callback @@ -25,7 +32,9 @@ class DexcomConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/dhcp/__init__.py b/homeassistant/components/dhcp/__init__.py index e830de39f29..0897729ec72 100644 --- a/homeassistant/components/dhcp/__init__.py +++ b/homeassistant/components/dhcp/__init__.py @@ -63,7 +63,6 @@ from .const import DOMAIN CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -FILTER = "udp and (port 67 or 68)" HOSTNAME: Final = "hostname" MAC_ADDRESS: Final = "macaddress" IP_ADDRESS: Final = "ip" diff --git a/homeassistant/components/dhcp/manifest.json b/homeassistant/components/dhcp/manifest.json index ff81540b0ea..6023e55faf3 100644 --- a/homeassistant/components/dhcp/manifest.json +++ b/homeassistant/components/dhcp/manifest.json @@ -16,6 +16,6 @@ "requirements": [ "aiodhcpwatcher==1.0.2", "aiodiscover==2.1.0", - "cached_ipaddress==0.3.0" + "cached-ipaddress==0.5.0" ] } diff --git a/homeassistant/components/directv/__init__.py b/homeassistant/components/directv/__init__.py index 50eb6bc7959..e59fa4e9d0d 100644 --- a/homeassistant/components/directv/__init__.py +++ b/homeassistant/components/directv/__init__.py @@ -10,13 +10,10 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE] SCAN_INTERVAL = timedelta(seconds=30) diff --git a/homeassistant/components/directv/config_flow.py b/homeassistant/components/directv/config_flow.py index 7cdfd5c07c9..56d8f262d1c 100644 --- 
a/homeassistant/components/directv/config_flow.py +++ b/homeassistant/components/directv/config_flow.py @@ -40,9 +40,9 @@ class DirecTVConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Set up the instance.""" - self.discovery_info = {} + self.discovery_info: dict[str, Any] = {} async def async_step_user( self, user_input: dict[str, Any] | None = None diff --git a/homeassistant/components/dominos/icons.json b/homeassistant/components/dominos/icons.json index d88bfb2542f..ca33ac91dfd 100644 --- a/homeassistant/components/dominos/icons.json +++ b/homeassistant/components/dominos/icons.json @@ -1,5 +1,7 @@ { "services": { - "order": "mdi:pizza" + "order": { + "service": "mdi:pizza" + } } } diff --git a/homeassistant/components/doods/image_processing.py b/homeassistant/components/doods/image_processing.py index 7ffb6655bb6..acd9d7fe71b 100644 --- a/homeassistant/components/doods/image_processing.py +++ b/homeassistant/components/doods/image_processing.py @@ -207,8 +207,6 @@ class Doods(ImageProcessingEntity): ] self._covers = area_config[CONF_COVERS] - template.attach(hass, self._file_out) - self._dconfig = dconfig self._matches = {} self._total_matches = 0 diff --git a/homeassistant/components/doorbird/__init__.py b/homeassistant/components/doorbird/__init__.py index 113b8031d9b..c943fa68766 100644 --- a/homeassistant/components/doorbird/__init__.py +++ b/homeassistant/components/doorbird/__init__.py @@ -29,7 +29,7 @@ from .view import DoorBirdRequestView CONF_CUSTOM_URL = "hass_url_override" -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/downloader/config_flow.py b/homeassistant/components/downloader/config_flow.py index e7191e055a6..61a7ba8fe52 100644 --- a/homeassistant/components/downloader/config_flow.py +++ b/homeassistant/components/downloader/config_flow.py @@ -43,13 +43,13 @@ class DownloaderConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initiated by configuration file.""" try: - await self._validate_input(user_input) + await self._validate_input(import_data) except DirectoryDoesNotExist: return self.async_abort(reason="directory_does_not_exist") - return self.async_create_entry(title=DEFAULT_NAME, data=user_input) + return self.async_create_entry(title=DEFAULT_NAME, data=import_data) async def _validate_input(self, user_input: dict[str, Any]) -> None: """Validate the user input if the directory exists.""" diff --git a/homeassistant/components/downloader/icons.json b/homeassistant/components/downloader/icons.json index 2a78df93ca7..8f8b5bb2688 100644 --- a/homeassistant/components/downloader/icons.json +++ b/homeassistant/components/downloader/icons.json @@ -1,5 +1,7 @@ { "services": { - "download_file": "mdi:download" + "download_file": { + "service": "mdi:download" + } } } diff --git a/homeassistant/components/drop_connect/binary_sensor.py b/homeassistant/components/drop_connect/binary_sensor.py index 73e0e254607..093c5bcbb8e 100644 --- a/homeassistant/components/drop_connect/binary_sensor.py +++ b/homeassistant/components/drop_connect/binary_sensor.py @@ -17,6 +17,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( CONF_DEVICE_TYPE, + 
DEV_ALERT, DEV_HUB, DEV_LEAK_DETECTOR, DEV_PROTECTION_VALVE, @@ -33,8 +34,10 @@ _LOGGER = logging.getLogger(__name__) # Binary sensor type constants +ALERT_SENSOR = "alert_sensor" LEAK_DETECTED = "leak" PENDING_NOTIFICATION = "pending_notification" +POWER = "power" PUMP_STATUS = "pump" RESERVE_IN_USE = "reserve_in_use" SALT_LOW = "salt" @@ -74,10 +77,23 @@ BINARY_SENSORS: list[DROPBinarySensorEntityDescription] = [ translation_key=PUMP_STATUS, value_fn=lambda device: device.drop_api.pump_status(), ), + DROPBinarySensorEntityDescription( + key=ALERT_SENSOR, + translation_key=ALERT_SENSOR, + device_class=BinarySensorDeviceClass.PROBLEM, + value_fn=lambda device: device.drop_api.sensor_high(), + ), + DROPBinarySensorEntityDescription( + key=POWER, + translation_key=None, # Use name provided by binary sensor device class + device_class=BinarySensorDeviceClass.POWER, + value_fn=lambda device: device.drop_api.power(), + ), ] # Defines which binary sensors are used by each device type DEVICE_BINARY_SENSORS: dict[str, list[str]] = { + DEV_ALERT: [ALERT_SENSOR, POWER], DEV_HUB: [LEAK_DETECTED, PENDING_NOTIFICATION], DEV_LEAK_DETECTOR: [LEAK_DETECTED], DEV_PROTECTION_VALVE: [LEAK_DETECTED], diff --git a/homeassistant/components/drop_connect/const.py b/homeassistant/components/drop_connect/const.py index 38a8a57ea72..f1012f9652c 100644 --- a/homeassistant/components/drop_connect/const.py +++ b/homeassistant/components/drop_connect/const.py @@ -11,6 +11,7 @@ CONF_DEVICE_NAME = "name" CONF_DEVICE_OWNER_ID = "drop_device_owner_id" # Values for DROP device types +DEV_ALERT = "alrt" DEV_FILTER = "filt" DEV_HUB = "hub" DEV_LEAK_DETECTOR = "leak" diff --git a/homeassistant/components/drop_connect/sensor.py b/homeassistant/components/drop_connect/sensor.py index 0806737254e..ad123ee13c7 100644 --- a/homeassistant/components/drop_connect/sensor.py +++ b/homeassistant/components/drop_connect/sensor.py @@ -27,6 +27,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .const import ( CONF_DEVICE_TYPE, + DEV_ALERT, DEV_FILTER, DEV_HUB, DEV_LEAK_DETECTOR, @@ -222,6 +223,7 @@ DEVICE_SENSORS: dict[str, list[str]] = { ], DEV_FILTER: [BATTERY, CURRENT_FLOW_RATE, CURRENT_SYSTEM_PRESSURE], DEV_LEAK_DETECTOR: [BATTERY, TEMPERATURE], + DEV_ALERT: [BATTERY, TEMPERATURE], DEV_PROTECTION_VALVE: [ BATTERY, CURRENT_FLOW_RATE, diff --git a/homeassistant/components/drop_connect/strings.json b/homeassistant/components/drop_connect/strings.json index 761d134bd18..93df4dc3310 100644 --- a/homeassistant/components/drop_connect/strings.json +++ b/homeassistant/components/drop_connect/strings.json @@ -12,26 +12,27 @@ }, "entity": { "sensor": { - "current_flow_rate": { "name": "Water flow rate" }, - "peak_flow_rate": { "name": "Peak water flow rate today" }, - "water_used_today": { "name": "Total water used today" }, "average_water_used": { "name": "Average daily water usage" }, "capacity_remaining": { "name": "Capacity remaining" }, - "current_system_pressure": { "name": "Current water pressure" }, - "high_system_pressure": { "name": "High water pressure today" }, - "low_system_pressure": { "name": "Low water pressure today" }, - "inlet_tds": { "name": "Inlet TDS" }, - "outlet_tds": { "name": "Outlet TDS" }, "cart1": { "name": "Cartridge 1 life remaining" }, "cart2": { "name": "Cartridge 2 life remaining" }, - "cart3": { "name": "Cartridge 3 life remaining" } + "cart3": { "name": "Cartridge 3 life remaining" }, + "current_flow_rate": { "name": "Water flow rate" }, + "current_system_pressure": { "name": 
"Current water pressure" }, + "high_system_pressure": { "name": "High water pressure today" }, + "inlet_tds": { "name": "Inlet TDS" }, + "low_system_pressure": { "name": "Low water pressure today" }, + "outlet_tds": { "name": "Outlet TDS" }, + "peak_flow_rate": { "name": "Peak water flow rate today" }, + "water_used_today": { "name": "Total water used today" } }, "binary_sensor": { + "alert_sensor": { "name": "Sensor" }, "leak": { "name": "Leak detected" }, "pending_notification": { "name": "Notification unread" }, + "pump": { "name": "Pump status" }, "reserve_in_use": { "name": "Reserve capacity in use" }, - "salt": { "name": "Salt low" }, - "pump": { "name": "Pump status" } + "salt": { "name": "Salt low" } }, "select": { "protect_mode": { @@ -44,8 +45,8 @@ } }, "switch": { - "water": { "name": "Water supply" }, - "bypass": { "name": "Treatment bypass" } + "bypass": { "name": "Treatment bypass" }, + "water": { "name": "Water supply" } } } } diff --git a/homeassistant/components/dsmr/manifest.json b/homeassistant/components/dsmr/manifest.json index 5490b2a6503..561f06d1bbe 100644 --- a/homeassistant/components/dsmr/manifest.json +++ b/homeassistant/components/dsmr/manifest.json @@ -1,7 +1,7 @@ { "domain": "dsmr", "name": "DSMR Smart Meter", - "codeowners": ["@Robbie1221", "@frenck"], + "codeowners": ["@Robbie1221"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/dsmr", "integration_type": "hub", diff --git a/homeassistant/components/duckdns/icons.json b/homeassistant/components/duckdns/icons.json index 79ec18d13ff..c5d0b5329dc 100644 --- a/homeassistant/components/duckdns/icons.json +++ b/homeassistant/components/duckdns/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_txt": "mdi:text-box-edit-outline" + "set_txt": { + "service": "mdi:text-box-edit-outline" + } } } diff --git a/homeassistant/components/dunehd/config_flow.py b/homeassistant/components/dunehd/config_flow.py index 8a0f3eec4a0..33ffd4a812a 100644 --- a/homeassistant/components/dunehd/config_flow.py +++ b/homeassistant/components/dunehd/config_flow.py @@ -39,7 +39,7 @@ class DuneHDConfigFlow(ConfigFlow, domain=DOMAIN): try: if self.host_already_configured(host): - raise AlreadyConfigured + raise AlreadyConfigured # noqa: TRY301 await self.init_device(host) except CannotConnect: errors[CONF_HOST] = "cannot_connect" diff --git a/homeassistant/components/dweet/sensor.py b/homeassistant/components/dweet/sensor.py index 01e0567ac8d..10109189eb0 100644 --- a/homeassistant/components/dweet/sensor.py +++ b/homeassistant/components/dweet/sensor.py @@ -51,8 +51,6 @@ def setup_platform( device = config.get(CONF_DEVICE) value_template = config.get(CONF_VALUE_TEMPLATE) unit = config.get(CONF_UNIT_OF_MEASUREMENT) - if value_template is not None: - value_template.hass = hass try: content = json.dumps(dweepy.get_latest_dweet_for(device)[0]["content"]) @@ -60,7 +58,7 @@ def setup_platform( _LOGGER.error("Device/thing %s could not be found", device) return - if value_template.render_with_possible_json_value(content) == "": + if value_template and value_template.render_with_possible_json_value(content) == "": _LOGGER.error("%s was not found", value_template) return diff --git a/homeassistant/components/dynalite/config_flow.py b/homeassistant/components/dynalite/config_flow.py index 3ae4828b668..928f7043a49 100644 --- a/homeassistant/components/dynalite/config_flow.py +++ b/homeassistant/components/dynalite/config_flow.py @@ -26,9 +26,9 @@ class DynaliteFlowHandler(ConfigFlow, domain=DOMAIN): """Initialize the 
Dynalite flow."""
         self.host = None
 
-    async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult:
+    async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
         """Import a new bridge as a config entry."""
-        LOGGER.debug("Starting async_step_import (deprecated) - %s", import_info)
+        LOGGER.debug("Starting async_step_import (deprecated) - %s", import_data)
         # Raise an issue that this is deprecated and has been imported
         async_create_issue(
             self.hass,
@@ -46,17 +46,17 @@ class DynaliteFlowHandler(ConfigFlow, domain=DOMAIN):
             },
         )
 
-        host = import_info[CONF_HOST]
+        host = import_data[CONF_HOST]
         # Check if host already exists
         for entry in self._async_current_entries():
             if entry.data[CONF_HOST] == host:
                 self.hass.config_entries.async_update_entry(
-                    entry, data=dict(import_info)
+                    entry, data=dict(import_data)
                 )
                 return self.async_abort(reason="already_configured")
 
         # New entry
-        return await self._try_create(import_info)
+        return await self._try_create(import_data)
 
     async def async_step_user(
         self, user_input: dict[str, Any] | None = None
diff --git a/homeassistant/components/dynalite/icons.json b/homeassistant/components/dynalite/icons.json
index dedbb1be3ac..27949197b53 100644
--- a/homeassistant/components/dynalite/icons.json
+++ b/homeassistant/components/dynalite/icons.json
@@ -1,6 +1,10 @@
 {
   "services": {
-    "request_area_preset": "mdi:texture-box",
-    "request_channel_level": "mdi:satellite-uplink"
+    "request_area_preset": {
+      "service": "mdi:texture-box"
+    },
+    "request_channel_level": {
+      "service": "mdi:satellite-uplink"
+    }
   }
 }
diff --git a/homeassistant/components/eafm/config_flow.py b/homeassistant/components/eafm/config_flow.py
index 0345d2acf94..6be1066575f 100644
--- a/homeassistant/components/eafm/config_flow.py
+++ b/homeassistant/components/eafm/config_flow.py
@@ -1,9 +1,11 @@
 """Config flow to configure flood monitoring gauges."""
 
+from typing import Any
+
 from aioeafm import get_stations
 import voluptuous as vol
 
-from homeassistant.config_entries import ConfigFlow
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.helpers.aiohttp_client import async_get_clientsession
 
 from .const import DOMAIN
@@ -14,21 +16,23 @@ class UKFloodsFlowHandler(ConfigFlow, domain=DOMAIN):
 
     VERSION = 1
 
-    def __init__(self):
+    def __init__(self) -> None:
         """Handle a UK Floods config flow."""
-        self.stations = {}
+        self.stations: dict[str, str] = {}
 
-    async def async_step_user(self, user_input=None):
+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
         """Handle a flow start."""
-        errors = {}
+        errors: dict[str, str] = {}
 
         if user_input is not None:
-            station = self.stations[user_input["station"]]
-            await self.async_set_unique_id(station, raise_on_progress=False)
+            selected_station = self.stations[user_input["station"]]
+            await self.async_set_unique_id(selected_station, raise_on_progress=False)
             self._abort_if_unique_id_configured()
             return self.async_create_entry(
                 title=user_input["station"],
-                data={"station": station},
+                data={"station": selected_station},
             )
 
         session = async_get_clientsession(hass=self.hass)
diff --git a/homeassistant/components/easyenergy/icons.json b/homeassistant/components/easyenergy/icons.json
index 90cbec17a65..501483eb932 100644
--- a/homeassistant/components/easyenergy/icons.json
+++ b/homeassistant/components/easyenergy/icons.json
@@ -13,8 +13,14 @@
     }
   },
   "services": {
-    "get_gas_prices": "mdi:gas-station",
-    "get_energy_usage_prices": "mdi:transmission-tower-import",
-    "get_energy_return_prices": "mdi:transmission-tower-export"
+    "get_gas_prices": {
+      "service": "mdi:gas-station"
+    },
+    "get_energy_usage_prices": {
+      "service": "mdi:transmission-tower-import"
+    },
+    "get_energy_return_prices": {
+      "service": "mdi:transmission-tower-export"
+    }
   }
 }
diff --git a/homeassistant/components/ebusd/icons.json b/homeassistant/components/ebusd/icons.json
index 642be37a43b..ebfa3673a0c 100644
--- a/homeassistant/components/ebusd/icons.json
+++ b/homeassistant/components/ebusd/icons.json
@@ -1,5 +1,7 @@
 {
   "services": {
-    "write": "mdi:pencil"
+    "write": {
+      "service": "mdi:pencil"
+    }
   }
 }
diff --git a/homeassistant/components/ecobee/config_flow.py b/homeassistant/components/ecobee/config_flow.py
index dd5c2c62c85..c0d4d9b03fc 100644
--- a/homeassistant/components/ecobee/config_flow.py
+++ b/homeassistant/components/ecobee/config_flow.py
@@ -1,5 +1,7 @@
 """Config flow to configure ecobee."""
 
+from typing import Any
+
 from pyecobee import (
     ECOBEE_API_KEY,
     ECOBEE_CONFIG_FILENAME,
@@ -8,7 +10,7 @@ from pyecobee import (
 )
 import voluptuous as vol
 
-from homeassistant.config_entries import ConfigFlow
+from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
 from homeassistant.const import CONF_API_KEY
 from homeassistant.exceptions import HomeAssistantError
 from homeassistant.util.json import load_json_object
@@ -23,9 +25,11 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN):
 
     def __init__(self) -> None:
         """Initialize the ecobee flow."""
-        self._ecobee = None
+        self._ecobee: Ecobee | None = None
 
-    async def async_step_user(self, user_input=None):
+    async def async_step_user(
+        self, user_input: dict[str, Any] | None = None
+    ) -> ConfigFlowResult:
         """Handle a flow initiated by the user."""
         if self._async_current_entries():
             # Config entry already exists, only one allowed.
@@ -76,7 +80,7 @@ class EcobeeFlowHandler(ConfigFlow, domain=DOMAIN):
             description_placeholders={"pin": self._ecobee.pin},
         )
 
-    async def async_step_import(self, import_data):
+    async def async_step_import(self, import_data: None) -> ConfigFlowResult:
         """Import ecobee config from configuration.yaml.
 
         Triggered by async_setup only if a config entry doesn't already exist.
diff --git a/homeassistant/components/ecobee/icons.json b/homeassistant/components/ecobee/icons.json index 3e736d0dc68..f24f1f7cfe5 100644 --- a/homeassistant/components/ecobee/icons.json +++ b/homeassistant/components/ecobee/icons.json @@ -1,11 +1,25 @@ { "services": { - "create_vacation": "mdi:umbrella-beach", - "delete_vacation": "mdi:umbrella-beach-outline", - "resume_program": "mdi:play", - "set_fan_min_on_time": "mdi:fan-clock", - "set_dst_mode": "mdi:sun-clock", - "set_mic_mode": "mdi:microphone", - "set_occupancy_modes": "mdi:eye-settings" + "create_vacation": { + "service": "mdi:umbrella-beach" + }, + "delete_vacation": { + "service": "mdi:umbrella-beach-outline" + }, + "resume_program": { + "service": "mdi:play" + }, + "set_fan_min_on_time": { + "service": "mdi:fan-clock" + }, + "set_dst_mode": { + "service": "mdi:sun-clock" + }, + "set_mic_mode": { + "service": "mdi:microphone" + }, + "set_occupancy_modes": { + "service": "mdi:eye-settings" + } } } diff --git a/homeassistant/components/econet/config_flow.py b/homeassistant/components/econet/config_flow.py index 81a5fdf75f0..145b9cf9f7d 100644 --- a/homeassistant/components/econet/config_flow.py +++ b/homeassistant/components/econet/config_flow.py @@ -1,10 +1,12 @@ """Config flow to configure the EcoNet component.""" +from typing import Any + from pyeconet import EcoNetApiInterface from pyeconet.errors import InvalidCredentialsError, PyeconetError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from .const import DOMAIN @@ -24,7 +26,9 @@ class EcoNetFlowHandler(ConfigFlow, domain=DOMAIN): } ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" if not user_input: return self.async_show_form( diff --git a/homeassistant/components/ecovacs/__init__.py b/homeassistant/components/ecovacs/__init__.py index d13a337057d..f8abf87ef27 100644 --- a/homeassistant/components/ecovacs/__init__.py +++ b/homeassistant/components/ecovacs/__init__.py @@ -1,31 +1,13 @@ """Support for Ecovacs Deebot vacuums.""" from sucks import VacBot -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import CONF_COUNTRY, CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.typing import ConfigType -from .const import CONF_CONTINENT, DOMAIN from .controller import EcovacsController -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_COUNTRY): vol.All(vol.Lower, cv.string), - vol.Required(CONF_CONTINENT): vol.All(vol.Lower, cv.string), - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - PLATFORMS = [ Platform.BINARY_SENSOR, Platform.BUTTON, @@ -41,17 +23,6 @@ PLATFORMS = [ type EcovacsConfigEntry = ConfigEntry[EcovacsController] -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Ecovacs component.""" - if DOMAIN in config: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config[DOMAIN] - ) - ) 
- return True - - async def async_setup_entry(hass: HomeAssistant, entry: EcovacsConfigEntry) -> bool: """Set up this integration using UI.""" controller = EcovacsController(hass, entry.data) diff --git a/homeassistant/components/ecovacs/config_flow.py b/homeassistant/components/ecovacs/config_flow.py index fa078bb02ef..2637dbbddf8 100644 --- a/homeassistant/components/ecovacs/config_flow.py +++ b/homeassistant/components/ecovacs/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from functools import partial import logging import ssl -from typing import Any, cast +from typing import Any from urllib.parse import urlparse from aiohttp import ClientError @@ -14,21 +14,16 @@ from deebot_client.const import UNDEFINED, UndefinedType from deebot_client.exceptions import InvalidAuthenticationError, MqttError from deebot_client.mqtt_client import MqttClient, create_mqtt_config from deebot_client.util import md5 -from deebot_client.util.continents import COUNTRIES_TO_CONTINENTS, get_continent import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_COUNTRY, CONF_MODE, CONF_PASSWORD, CONF_USERNAME -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import AbortFlow +from homeassistant.core import HomeAssistant from homeassistant.helpers import aiohttp_client, selector -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import VolDictType -from homeassistant.loader import async_get_issue_tracker from homeassistant.util.ssl import get_default_no_verify_context from .const import ( - CONF_CONTINENT, CONF_OVERRIDE_MQTT_URL, CONF_OVERRIDE_REST_URL, CONF_VERIFY_MQTT_CERTIFICATE, @@ -222,98 +217,3 @@ class EcovacsConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, last_step=True, ) - - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Import configuration from yaml.""" - - def create_repair( - error: str | None = None, placeholders: dict[str, Any] | None = None - ) -> None: - if placeholders is None: - placeholders = {} - if error: - async_create_issue( - self.hass, - DOMAIN, - f"deprecated_yaml_import_issue_{error}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{error}", - translation_placeholders=placeholders - | {"url": "/config/integrations/dashboard/add?domain=ecovacs"}, - ) - else: - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders=placeholders - | { - "domain": DOMAIN, - "integration_title": "Ecovacs", - }, - ) - - # We need to validate the imported country and continent - # as the YAML configuration allows any string for them. - # The config flow allows only valid alpha-2 country codes - # through the CountrySelector. - # The continent will be calculated with the function get_continent - # from the country code and there is no need to specify the continent anymore. - # As the YAML configuration includes the continent, - # we check if both the entered continent and the calculated continent match. - # If not we will inform the user about the mismatch. 
- error = None - placeholders = None - - # Convert the country to upper case as ISO 3166-1 alpha-2 country codes are upper case - user_input[CONF_COUNTRY] = user_input[CONF_COUNTRY].upper() - - if len(user_input[CONF_COUNTRY]) != 2: - error = "invalid_country_length" - placeholders = {"countries_url": "https://www.iso.org/obp/ui/#search/code/"} - elif len(user_input[CONF_CONTINENT]) != 2: - error = "invalid_continent_length" - placeholders = { - "continent_list": ",".join( - sorted(set(COUNTRIES_TO_CONTINENTS.values())) - ) - } - elif user_input[CONF_CONTINENT].lower() != ( - continent := get_continent(user_input[CONF_COUNTRY]) - ): - error = "continent_not_match" - placeholders = { - "continent": continent, - "github_issue_url": cast( - str, async_get_issue_tracker(self.hass, integration_domain=DOMAIN) - ), - } - - if error: - create_repair(error, placeholders) - return self.async_abort(reason=error) - - # Remove the continent from the user input as it is not needed anymore - user_input.pop(CONF_CONTINENT) - try: - result = await self.async_step_auth(user_input) - except AbortFlow as ex: - if ex.reason == "already_configured": - create_repair() - raise - - if errors := result.get("errors"): - error = errors["base"] - create_repair(error) - return self.async_abort(reason=error) - - create_repair() - return result diff --git a/homeassistant/components/ecovacs/icons.json b/homeassistant/components/ecovacs/icons.json index 0c7178ced84..6097f43a4e4 100644 --- a/homeassistant/components/ecovacs/icons.json +++ b/homeassistant/components/ecovacs/icons.json @@ -145,6 +145,8 @@ } }, "services": { - "raw_get_positions": "mdi:map-marker-radius-outline" + "raw_get_positions": { + "service": "mdi:map-marker-radius-outline" + } } } diff --git a/homeassistant/components/ecovacs/lawn_mower.py b/homeassistant/components/ecovacs/lawn_mower.py index a1dc8acf3a2..bf773207dc5 100644 --- a/homeassistant/components/ecovacs/lawn_mower.py +++ b/homeassistant/components/ecovacs/lawn_mower.py @@ -27,7 +27,7 @@ _LOGGER = logging.getLogger(__name__) _STATE_TO_MOWER_STATE = { State.IDLE: LawnMowerActivity.PAUSED, State.CLEANING: LawnMowerActivity.MOWING, - State.RETURNING: LawnMowerActivity.MOWING, + State.RETURNING: LawnMowerActivity.RETURNING, State.DOCKED: LawnMowerActivity.DOCKED, State.ERROR: LawnMowerActivity.ERROR, State.PAUSED: LawnMowerActivity.PAUSED, diff --git a/homeassistant/components/ecovacs/manifest.json b/homeassistant/components/ecovacs/manifest.json index 560ee4d599c..33977b3b0de 100644 --- a/homeassistant/components/ecovacs/manifest.json +++ b/homeassistant/components/ecovacs/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/ecovacs", "iot_class": "cloud_push", "loggers": ["sleekxmppfs", "sucks", "deebot_client"], - "requirements": ["py-sucks==0.9.10", "deebot-client==8.3.0"] + "requirements": ["py-sucks==0.9.10", "deebot-client==8.4.0"] } diff --git a/homeassistant/components/ecovacs/strings.json b/homeassistant/components/ecovacs/strings.json index ea216f11694..8222cabed07 100644 --- a/homeassistant/components/ecovacs/strings.json +++ b/homeassistant/components/ecovacs/strings.json @@ -237,32 +237,6 @@ "message": "Getting the positions of the chargers and the device itself is not supported" } }, - "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there was a connection error when trying to import the YAML 
configuration.\n\nPlease verify that you have a stable internet connection and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_invalid_auth": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there was an authentication error when trying to import the YAML configuration.\n\nCorrect the YAML configuration and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_unknown": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there was an unknown error when trying to import the YAML configuration.\n\nEnsure the YAML configuration is correct and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_invalid_country_length": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there is an invalid country specified in the YAML configuration.\n\nPlease change the country to the [Alpha-2 code of your country]({countries_url}) and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_invalid_continent_length": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there is an invalid continent specified in the YAML configuration.\n\nPlease correct the continent to be one of {continent_list} and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_continent_not_match": { - "title": "The Ecovacs YAML configuration import failed", - "description": "Configuring Ecovacs using YAML is being removed but there is an unexpected continent specified in the YAML configuration.\n\nFrom the given country, the continent \"{continent}\" is expected. Change the continent and restart Home Assistant to try again or remove the Ecovacs YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually.\n\nIf the contintent \"{continent}\" is not applicable, please open an issue on [GitHub]({github_issue_url})." 
- } - }, "selector": { "installation_mode": { "options": { diff --git a/homeassistant/components/ecovacs/vacuum.py b/homeassistant/components/ecovacs/vacuum.py index e690038ff71..0d14267e08d 100644 --- a/homeassistant/components/ecovacs/vacuum.py +++ b/homeassistant/components/ecovacs/vacuum.py @@ -65,7 +65,7 @@ async def async_setup_entry( platform = entity_platform.async_get_current_platform() platform.async_register_entity_service( SERVICE_RAW_GET_POSITIONS, - {}, + None, "async_raw_get_positions", supports_response=SupportsResponse.ONLY, ) diff --git a/homeassistant/components/egardia/__init__.py b/homeassistant/components/egardia/__init__.py index 9ff4b9af94f..89dae7d23c9 100644 --- a/homeassistant/components/egardia/__init__.py +++ b/homeassistant/components/egardia/__init__.py @@ -113,7 +113,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: server = egardiaserver.EgardiaServer("", rs_port) bound = server.bind() if not bound: - raise OSError( + raise OSError( # noqa: TRY301 "Binding error occurred while starting EgardiaServer." ) hass.data[EGARDIA_SERVER] = server diff --git a/homeassistant/components/elgato/icons.json b/homeassistant/components/elgato/icons.json index 1b5eaf3763a..d2c286594c7 100644 --- a/homeassistant/components/elgato/icons.json +++ b/homeassistant/components/elgato/icons.json @@ -10,6 +10,8 @@ } }, "services": { - "identify": "mdi:crosshairs-question" + "identify": { + "service": "mdi:crosshairs-question" + } } } diff --git a/homeassistant/components/elgato/light.py b/homeassistant/components/elgato/light.py index 339bed97f6f..a62a26f21d3 100644 --- a/homeassistant/components/elgato/light.py +++ b/homeassistant/components/elgato/light.py @@ -40,7 +40,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_IDENTIFY, - {}, + None, ElgatoLight.async_identify.__name__, ) diff --git a/homeassistant/components/elkm1/config_flow.py b/homeassistant/components/elkm1/config_flow.py index 4ab8d1fe181..2f9d3338d76 100644 --- a/homeassistant/components/elkm1/config_flow.py +++ b/homeassistant/components/elkm1/config_flow.py @@ -335,10 +335,10 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" _LOGGER.debug("Elk is importing from yaml") - url = _make_url_from_data(user_input) + url = _make_url_from_data(import_data) if self._url_already_configured(url): return self.async_abort(reason="address_already_configured") @@ -357,7 +357,7 @@ class Elkm1ConfigFlow(ConfigFlow, domain=DOMAIN): ) self._abort_if_unique_id_configured() - errors, result = await self._async_create_or_error(user_input, True) + errors, result = await self._async_create_or_error(import_data, True) if errors: return self.async_abort(reason=list(errors.values())[0]) assert result is not None diff --git a/homeassistant/components/elkm1/icons.json b/homeassistant/components/elkm1/icons.json index 3bb9ea8c87d..54827e4b6ef 100644 --- a/homeassistant/components/elkm1/icons.json +++ b/homeassistant/components/elkm1/icons.json @@ -10,18 +10,44 @@ } }, "services": { - "alarm_bypass": "mdi:shield-off", - "alarm_clear_bypass": "mdi:shield", - "alarm_arm_home_instant": "mdi:shield-lock", - "alarm_arm_night_instant": "mdi:shield-moon", - "alarm_arm_vacation": "mdi:beach", - "alarm_display_message": "mdi:message-alert", - "set_time": 
"mdi:clock-edit", - "speak_phrase": "mdi:message-processing", - "speak_word": "mdi:message-minus", - "sensor_counter_refresh": "mdi:refresh", - "sensor_counter_set": "mdi:counter", - "sensor_zone_bypass": "mdi:shield-off", - "sensor_zone_trigger": "mdi:shield" + "alarm_bypass": { + "service": "mdi:shield-off" + }, + "alarm_clear_bypass": { + "service": "mdi:shield" + }, + "alarm_arm_home_instant": { + "service": "mdi:shield-lock" + }, + "alarm_arm_night_instant": { + "service": "mdi:shield-moon" + }, + "alarm_arm_vacation": { + "service": "mdi:beach" + }, + "alarm_display_message": { + "service": "mdi:message-alert" + }, + "set_time": { + "service": "mdi:clock-edit" + }, + "speak_phrase": { + "service": "mdi:message-processing" + }, + "speak_word": { + "service": "mdi:message-minus" + }, + "sensor_counter_refresh": { + "service": "mdi:refresh" + }, + "sensor_counter_set": { + "service": "mdi:counter" + }, + "sensor_zone_bypass": { + "service": "mdi:shield-off" + }, + "sensor_zone_trigger": { + "service": "mdi:shield" + } } } diff --git a/homeassistant/components/elkm1/sensor.py b/homeassistant/components/elkm1/sensor.py index 7d3601f0bd0..16f877719a7 100644 --- a/homeassistant/components/elkm1/sensor.py +++ b/homeassistant/components/elkm1/sensor.py @@ -56,7 +56,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_SENSOR_COUNTER_REFRESH, - {}, + None, "async_counter_refresh", ) platform.async_register_entity_service( @@ -71,7 +71,7 @@ async def async_setup_entry( ) platform.async_register_entity_service( SERVICE_SENSOR_ZONE_TRIGGER, - {}, + None, "async_zone_trigger", ) diff --git a/homeassistant/components/elmax/config_flow.py b/homeassistant/components/elmax/config_flow.py index 2971a425663..69f69a5fd31 100644 --- a/homeassistant/components/elmax/config_flow.py +++ b/homeassistant/components/elmax/config_flow.py @@ -424,7 +424,7 @@ class ElmaxConfigFlow(ConfigFlow, domain=DOMAIN): if p.hash == self._entry.data[CONF_ELMAX_PANEL_ID] ] if len(panels) < 1: - raise NoOnlinePanelsError + raise NoOnlinePanelsError # noqa: TRY301 # Verify the pin is still valid. 
await client.get_panel_status( diff --git a/homeassistant/components/emby/manifest.json b/homeassistant/components/emby/manifest.json index f90dda79352..3f57f62eb0b 100644 --- a/homeassistant/components/emby/manifest.json +++ b/homeassistant/components/emby/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/emby", "iot_class": "local_push", "loggers": ["pyemby"], - "requirements": ["pyEmby==1.9"] + "requirements": ["pyEmby==1.10"] } diff --git a/homeassistant/components/emoncms/sensor.py b/homeassistant/components/emoncms/sensor.py index c299c5a1b9f..3c448391974 100644 --- a/homeassistant/components/emoncms/sensor.py +++ b/homeassistant/components/emoncms/sensor.py @@ -87,9 +87,6 @@ async def async_setup_platform( sensor_names = config.get(CONF_SENSOR_NAMES) scan_interval = config.get(CONF_SCAN_INTERVAL, timedelta(seconds=30)) - if value_template is not None: - value_template.hass = hass - emoncms_client = EmoncmsClient(url, apikey, session=async_get_clientsession(hass)) coordinator = EmoncmsCoordinator(hass, emoncms_client, scan_interval) await coordinator.async_refresh() diff --git a/homeassistant/components/emonitor/config_flow.py b/homeassistant/components/emonitor/config_flow.py index 9909ddff19c..b90b1477f87 100644 --- a/homeassistant/components/emonitor/config_flow.py +++ b/homeassistant/components/emonitor/config_flow.py @@ -1,6 +1,7 @@ """Config flow for SiteSage Emonitor integration.""" import logging +from typing import Any from aioemonitor import Emonitor import aiohttp @@ -33,12 +34,14 @@ class EmonitorConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize Emonitor ConfigFlow.""" - self.discovered_ip = None - self.discovered_info = None + self.discovered_ip: str | None = None + self.discovered_info: dict[str, str] | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/emulated_kasa/__init__.py b/homeassistant/components/emulated_kasa/__init__.py index d5fc8af1aa4..408d8c4eff8 100644 --- a/homeassistant/components/emulated_kasa/__init__.py +++ b/homeassistant/components/emulated_kasa/__init__.py @@ -95,8 +95,6 @@ async def validate_configs(hass, entity_configs): power_val = entity_config[CONF_POWER] if isinstance(power_val, str) and is_template_string(power_val): entity_config[CONF_POWER] = Template(power_val, hass) - elif isinstance(power_val, Template): - entity_config[CONF_POWER].hass = hass elif CONF_POWER_ENTITY in entity_config: power_val = entity_config[CONF_POWER_ENTITY] if hass.states.get(power_val) is None: diff --git a/homeassistant/components/emulated_roku/config_flow.py b/homeassistant/components/emulated_roku/config_flow.py index 1a3b2c0e2af..eed0298fc57 100644 --- a/homeassistant/components/emulated_roku/config_flow.py +++ b/homeassistant/components/emulated_roku/config_flow.py @@ -1,8 +1,10 @@ """Config flow to configure emulated_roku component.""" +from typing import Any + import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME from homeassistant.core import callback @@ -22,9 +24,11 @@ class EmulatedRokuFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + 
async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: self._async_abort_entries_match({CONF_NAME: user_input[CONF_NAME]}) @@ -52,6 +56,6 @@ class EmulatedRokuFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow import.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) diff --git a/homeassistant/components/energyzero/icons.json b/homeassistant/components/energyzero/icons.json index bac061dd318..802f8ef6916 100644 --- a/homeassistant/components/energyzero/icons.json +++ b/homeassistant/components/energyzero/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "get_gas_prices": "mdi:gas-station", - "get_energy_prices": "mdi:lightning-bolt" + "get_gas_prices": { + "service": "mdi:gas-station" + }, + "get_energy_prices": { + "service": "mdi:lightning-bolt" + } } } diff --git a/homeassistant/components/enigma2/config_flow.py b/homeassistant/components/enigma2/config_flow.py index 71c5830d550..55c0f6fc6ae 100644 --- a/homeassistant/components/enigma2/config_flow.py +++ b/homeassistant/components/enigma2/config_flow.py @@ -152,20 +152,20 @@ class Enigma2ConfigFlowHandler(ConfigFlow, domain=DOMAIN): ) return self.async_create_entry(data=user_input, title=user_input[CONF_HOST]) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle the import step.""" - if CONF_PORT not in user_input: - user_input[CONF_PORT] = DEFAULT_PORT - if CONF_SSL not in user_input: - user_input[CONF_SSL] = DEFAULT_SSL - user_input[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL + if CONF_PORT not in import_data: + import_data[CONF_PORT] = DEFAULT_PORT + if CONF_SSL not in import_data: + import_data[CONF_SSL] = DEFAULT_SSL + import_data[CONF_VERIFY_SSL] = DEFAULT_VERIFY_SSL - data = {key: user_input[key] for key in user_input if key in self.DATA_KEYS} + data = {key: import_data[key] for key in import_data if key in self.DATA_KEYS} options = { - key: user_input[key] for key in user_input if key in self.OPTIONS_KEYS + key: import_data[key] for key in import_data if key in self.OPTIONS_KEYS } - if errors := await self.validate_user_input(user_input): + if errors := await self.validate_user_input(import_data): async_create_issue( self.hass, DOMAIN, diff --git a/homeassistant/components/enigma2/coordinator.py b/homeassistant/components/enigma2/coordinator.py index f1da56309e8..a35e74f582f 100644 --- a/homeassistant/components/enigma2/coordinator.py +++ b/homeassistant/components/enigma2/coordinator.py @@ -72,11 +72,14 @@ class Enigma2UpdateCoordinator(DataUpdateCoordinator[OpenWebIfStatus]): self.device_info["model"] = about["info"]["model"] self.device_info["manufacturer"] = about["info"]["brand"] self.device_info[ATTR_IDENTIFIERS] = { - (DOMAIN, format_mac(iface["mac"])) for iface in about["info"]["ifaces"] + (DOMAIN, format_mac(iface["mac"])) + for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None } self.device_info[ATTR_CONNECTIONS] = { (CONNECTION_NETWORK_MAC, format_mac(iface["mac"])) for iface in about["info"]["ifaces"] + if "mac" in iface and iface["mac"] is not None } async def _async_update_data(self) -> 
OpenWebIfStatus: diff --git a/homeassistant/components/enocean/config_flow.py b/homeassistant/components/enocean/config_flow.py index 157d58bbf23..3105b3ab595 100644 --- a/homeassistant/components/enocean/config_flow.py +++ b/homeassistant/components/enocean/config_flow.py @@ -1,8 +1,10 @@ """Config flows for the ENOcean integration.""" +from typing import Any + import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_DEVICE from . import dongle @@ -20,19 +22,21 @@ class EnOceanFlowHandler(ConfigFlow, domain=DOMAIN): self.dongle_path = None self.discovery_info = None - async def async_step_import(self, data=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a yaml configuration.""" - if not await self.validate_enocean_conf(data): + if not await self.validate_enocean_conf(import_data): LOGGER.warning( "Cannot import yaml configuration: %s is not a valid dongle path", - data[CONF_DEVICE], + import_data[CONF_DEVICE], ) return self.async_abort(reason="invalid_dongle_path") - return self.create_enocean_entry(data) + return self.create_enocean_entry(import_data) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle an EnOcean config flow start.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/environment_canada/config_flow.py b/homeassistant/components/environment_canada/config_flow.py index a351bb0ef06..c4fd16f9522 100644 --- a/homeassistant/components/environment_canada/config_flow.py +++ b/homeassistant/components/environment_canada/config_flow.py @@ -1,13 +1,14 @@ """Config flow for Environment Canada integration.""" import logging -import xml.etree.ElementTree as et +from typing import Any +import xml.etree.ElementTree as ET import aiohttp from env_canada import ECWeather, ec_exc import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_LANGUAGE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.helpers import config_validation as cv @@ -46,13 +47,15 @@ class EnvironmentCanadaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: try: info = await validate_input(user_input) - except (et.ParseError, vol.MultipleInvalid, ec_exc.UnknownStationId): + except (ET.ParseError, vol.MultipleInvalid, ec_exc.UnknownStationId): errors["base"] = "bad_station_id" except aiohttp.ClientConnectionError: errors["base"] = "cannot_connect" diff --git a/homeassistant/components/environment_canada/coordinator.py b/homeassistant/components/environment_canada/coordinator.py index e17c360e3fb..8e77b309c78 100644 --- a/homeassistant/components/environment_canada/coordinator.py +++ b/homeassistant/components/environment_canada/coordinator.py @@ -1,7 +1,7 @@ """Coordinator for the Environment Canada (EC) component.""" import logging -import xml.etree.ElementTree as et +import xml.etree.ElementTree as ET from env_canada import ec_exc @@ -27,6 +27,6 @@ class ECDataUpdateCoordinator(DataUpdateCoordinator): 
"""Fetch data from EC.""" try: await self.ec_data.update() - except (et.ParseError, ec_exc.UnknownStationId) as ex: + except (ET.ParseError, ec_exc.UnknownStationId) as ex: raise UpdateFailed(f"Error fetching {self.name} data: {ex}") from ex return self.ec_data diff --git a/homeassistant/components/environment_canada/icons.json b/homeassistant/components/environment_canada/icons.json index 5e23a96bcfb..c3562ce1840 100644 --- a/homeassistant/components/environment_canada/icons.json +++ b/homeassistant/components/environment_canada/icons.json @@ -19,6 +19,8 @@ } }, "services": { - "set_radar_type": "mdi:radar" + "set_radar_type": { + "service": "mdi:radar" + } } } diff --git a/homeassistant/components/envisalink/icons.json b/homeassistant/components/envisalink/icons.json index 20696067f76..b25e988f478 100644 --- a/homeassistant/components/envisalink/icons.json +++ b/homeassistant/components/envisalink/icons.json @@ -1,6 +1,10 @@ { "services": { - "alarm_keypress": "mdi:alarm-panel", - "invoke_custom_function": "mdi:console" + "alarm_keypress": { + "service": "mdi:alarm-panel" + }, + "invoke_custom_function": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/epic_games_store/calendar.py b/homeassistant/components/epic_games_store/calendar.py index 75c448e8467..2ebb381341e 100644 --- a/homeassistant/components/epic_games_store/calendar.py +++ b/homeassistant/components/epic_games_store/calendar.py @@ -16,7 +16,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import DOMAIN, CalendarType from .coordinator import EGSCalendarUpdateCoordinator -DateRange = namedtuple("DateRange", ["start", "end"]) +DateRange = namedtuple("DateRange", ["start", "end"]) # noqa: PYI024 async def async_setup_entry( diff --git a/homeassistant/components/epson/config_flow.py b/homeassistant/components/epson/config_flow.py index 4f038de9318..1e3b006a984 100644 --- a/homeassistant/components/epson/config_flow.py +++ b/homeassistant/components/epson/config_flow.py @@ -1,10 +1,11 @@ """Config flow for epson integration.""" import logging +from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT from . 
import validate_projector @@ -26,7 +27,9 @@ class EpsonConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/epson/icons.json b/homeassistant/components/epson/icons.json index a9237edcfd1..d41ddebcdce 100644 --- a/homeassistant/components/epson/icons.json +++ b/homeassistant/components/epson/icons.json @@ -1,5 +1,7 @@ { "services": { - "select_cmode": "mdi:palette" + "select_cmode": { + "service": "mdi:palette" + } } } diff --git a/homeassistant/components/esphome/climate.py b/homeassistant/components/esphome/climate.py index da1cdfb0eab..1b9b53f24cd 100644 --- a/homeassistant/components/esphome/climate.py +++ b/homeassistant/components/esphome/climate.py @@ -58,6 +58,7 @@ from homeassistant.core import callback from .entity import ( EsphomeEntity, convert_api_error_ha_error, + esphome_float_state_property, esphome_state_property, platform_async_setup_entry, ) @@ -227,7 +228,7 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti return _SWING_MODES.from_esphome(self._state.swing_mode) @property - @esphome_state_property + @esphome_float_state_property def current_temperature(self) -> float | None: """Return the current temperature.""" return self._state.current_temperature @@ -241,19 +242,19 @@ class EsphomeClimateEntity(EsphomeEntity[ClimateInfo, ClimateState], ClimateEnti return round(self._state.current_humidity) @property - @esphome_state_property + @esphome_float_state_property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" return self._state.target_temperature @property - @esphome_state_property + @esphome_float_state_property def target_temperature_low(self) -> float | None: """Return the lowbound target temperature we try to reach.""" return self._state.target_temperature_low @property - @esphome_state_property + @esphome_float_state_property def target_temperature_high(self) -> float | None: """Return the highbound target temperature we try to reach.""" return self._state.target_temperature_high diff --git a/homeassistant/components/esphome/cover.py b/homeassistant/components/esphome/cover.py index 19ce4cbf55a..83c749f89ca 100644 --- a/homeassistant/components/esphome/cover.py +++ b/homeassistant/components/esphome/cover.py @@ -61,13 +61,13 @@ class EsphomeCover(EsphomeEntity[CoverInfo, CoverState], CoverEntity): @esphome_state_property def is_opening(self) -> bool: """Return if the cover is opening or not.""" - return self._state.current_operation == CoverOperation.IS_OPENING + return self._state.current_operation is CoverOperation.IS_OPENING @property @esphome_state_property def is_closing(self) -> bool: """Return if the cover is closing or not.""" - return self._state.current_operation == CoverOperation.IS_CLOSING + return self._state.current_operation is CoverOperation.IS_CLOSING @property @esphome_state_property diff --git a/homeassistant/components/esphome/entity.py b/homeassistant/components/esphome/entity.py index 6e02f8de869..455a3f8d105 100644 --- a/homeassistant/components/esphome/entity.py +++ b/homeassistant/components/esphome/entity.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Awaitable, Callable, Coroutine import functools import math -from typing import TYPE_CHECKING, Any, Concatenate, Generic, 
ParamSpec, TypeVar, cast +from typing import TYPE_CHECKING, Any, Concatenate, Generic, TypeVar, cast from aioesphomeapi import ( APIConnectionError, @@ -30,8 +30,6 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from .entry_data import ESPHomeConfigEntry, RuntimeEntryData from .enum_mapper import EsphomeEnumMapper -_R = TypeVar("_R") -_P = ParamSpec("_P") _InfoT = TypeVar("_InfoT", bound=EntityInfo) _EntityT = TypeVar("_EntityT", bound="EsphomeEntity[Any,Any]") _StateT = TypeVar("_StateT", bound=EntityState) @@ -96,7 +94,6 @@ async def platform_async_setup_entry( """ entry_data = entry.runtime_data entry_data.info[info_type] = {} - entry_data.state.setdefault(state_type, {}) platform = entity_platform.async_get_current_platform() on_static_info_update = functools.partial( async_static_info_updated, @@ -116,30 +113,45 @@ async def platform_async_setup_entry( ) -def esphome_state_property( +def esphome_state_property[_R, _EntityT: EsphomeEntity[Any, Any]]( func: Callable[[_EntityT], _R], ) -> Callable[[_EntityT], _R | None]: """Wrap a state property of an esphome entity. - This checks if the state object in the entity is set, and - prevents writing NAN values to the Home Assistant state machine. + This checks if the state object in the entity is set + and returns None if it is not set. """ @functools.wraps(func) def _wrapper(self: _EntityT) -> _R | None: - if not self._has_state: - return None - val = func(self) - if isinstance(val, float) and not math.isfinite(val): - # Home Assistant doesn't use NaN or inf values in state machine - # (not JSON serializable) - return None - return val + return func(self) if self._has_state else None return _wrapper -def convert_api_error_ha_error( +def esphome_float_state_property[_EntityT: EsphomeEntity[Any, Any]]( + func: Callable[[_EntityT], float | None], +) -> Callable[[_EntityT], float | None]: + """Wrap a state property of an esphome entity that returns a float. + + This checks if the state object in the entity is set, and returns + None if its not set. If also prevents writing NAN values to the + Home Assistant state machine. + """ + + @functools.wraps(func) + def _wrapper(self: _EntityT) -> float | None: + if not self._has_state: + return None + val = func(self) + # Home Assistant doesn't use NaN or inf values in state machine + # (not JSON serializable) + return None if val is None or not math.isfinite(val) else val + + return _wrapper + + +def convert_api_error_ha_error[**_P, _R, _EntityT: EsphomeEntity[Any, Any]]( func: Callable[Concatenate[_EntityT, _P], Awaitable[None]], ) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: """Decorate ESPHome command calls that send commands/make changes to the device. 
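The hunk above splits the old esphome_state_property helper in two: a generic wrapper that only guards against a missing state object, and a float-specific wrapper that additionally drops NaN/inf values before they can reach the Home Assistant state machine. The sketch below is a minimal, standalone illustration of that float behaviour and is not part of the patch; ToyEntity, float_state_property and their attribute names are invented stand-ins that mirror the logic of the new esphome_float_state_property without the generics or Home Assistant imports.

import functools
import math
from collections.abc import Callable


def float_state_property(
    func: Callable[["ToyEntity"], float | None],
) -> Callable[["ToyEntity"], float | None]:
    """Return None when no state has been received or the value is not finite."""

    @functools.wraps(func)
    def _wrapper(self: "ToyEntity") -> float | None:
        if not self._has_state:
            return None
        val = func(self)
        # NaN and inf are not JSON serializable, so they are mapped to None
        return None if val is None or not math.isfinite(val) else val

    return _wrapper


class ToyEntity:
    """Stand-in for an entity that caches one float state value."""

    def __init__(self, has_state: bool, value: float) -> None:
        self._has_state = has_state
        self._value = value

    @property
    @float_state_property
    def native_value(self) -> float | None:
        return self._value


print(ToyEntity(False, 21.5).native_value)         # None: no state yet
print(ToyEntity(True, float("nan")).native_value)  # None: NaN filtered out
print(ToyEntity(True, 21.5).native_value)          # 21.5 passes through

Call sites that can see non-finite values (climate temperatures, media-player volume, number states) adopt the float variant elsewhere in this patch, while other properties keep the plain esphome_state_property wrapper.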
@@ -190,6 +202,7 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): ) -> None: """Initialize.""" self._entry_data = entry_data + self._states = cast(dict[int, _StateT], entry_data.state[state_type]) assert entry_data.device_info is not None device_info = entry_data.device_info self._device_info = device_info @@ -267,11 +280,9 @@ class EsphomeEntity(Entity, Generic[_InfoT, _StateT]): @callback def _update_state_from_entry_data(self) -> None: """Update state from entry data.""" - state = self._entry_data.state key = self._key - state_type = self._state_type - if has_state := key in state[state_type]: - self._state = cast(_StateT, state[state_type][key]) + if has_state := key in self._states: + self._state = self._states[key] self._has_state = has_state @callback diff --git a/homeassistant/components/esphome/entry_data.py b/homeassistant/components/esphome/entry_data.py index ff6f048eba1..6fc40612c48 100644 --- a/homeassistant/components/esphome/entry_data.py +++ b/homeassistant/components/esphome/entry_data.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from collections import defaultdict from collections.abc import Callable, Iterable from dataclasses import dataclass, field from functools import partial @@ -111,7 +112,9 @@ class RuntimeEntryData: title: str client: APIClient store: ESPHomeStorage - state: dict[type[EntityState], dict[int, EntityState]] = field(default_factory=dict) + state: defaultdict[type[EntityState], dict[int, EntityState]] = field( + default_factory=lambda: defaultdict(dict) + ) # When the disconnect callback is called, we mark all states # as stale so we will always dispatch a state update when the # device reconnects. This is the same format as state_subscriptions. diff --git a/homeassistant/components/esphome/lock.py b/homeassistant/components/esphome/lock.py index 4caa1f68612..15a402ccb91 100644 --- a/homeassistant/components/esphome/lock.py +++ b/homeassistant/components/esphome/lock.py @@ -40,25 +40,25 @@ class EsphomeLock(EsphomeEntity[LockInfo, LockEntityState], LockEntity): @esphome_state_property def is_locked(self) -> bool | None: """Return true if the lock is locked.""" - return self._state.state == LockState.LOCKED + return self._state.state is LockState.LOCKED @property @esphome_state_property def is_locking(self) -> bool | None: """Return true if the lock is locking.""" - return self._state.state == LockState.LOCKING + return self._state.state is LockState.LOCKING @property @esphome_state_property def is_unlocking(self) -> bool | None: """Return true if the lock is unlocking.""" - return self._state.state == LockState.UNLOCKING + return self._state.state is LockState.UNLOCKING @property @esphome_state_property def is_jammed(self) -> bool | None: """Return true if the lock is jammed (incomplete locking).""" - return self._state.state == LockState.JAMMED + return self._state.state is LockState.JAMMED @convert_api_error_ha_error async def async_lock(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/esphome/manager.py b/homeassistant/components/esphome/manager.py index 4b4537d147f..93e8d7b5bc2 100644 --- a/homeassistant/components/esphome/manager.py +++ b/homeassistant/components/esphome/manager.py @@ -197,9 +197,9 @@ class ESPHomeManager: if service.data_template: try: data_template = { - key: Template(value) for key, value in service.data_template.items() + key: Template(value, hass) + for key, value in service.data_template.items() } - template.attach(hass, data_template) service_data.update( 
template.render_complex(data_template, service.variables) ) @@ -329,6 +329,15 @@ class ESPHomeManager: entity_id, attribute, hass.states.get(entity_id) ) + @callback + def async_on_state_request( + self, entity_id: str, attribute: str | None = None + ) -> None: + """Forward state for requested entity.""" + self._send_home_assistant_state( + entity_id, attribute, self.hass.states.get(entity_id) + ) + def _handle_pipeline_finished(self) -> None: self.entry_data.async_set_assist_pipeline_state(False) @@ -526,7 +535,10 @@ class ESPHomeManager: cli.subscribe_states(entry_data.async_update_state) cli.subscribe_service_calls(self.async_on_service_call) - cli.subscribe_home_assistant_states(self.async_on_state_subscription) + cli.subscribe_home_assistant_states( + self.async_on_state_subscription, + self.async_on_state_request, + ) entry_data.async_save_to_store() _async_check_firmware_version(hass, device_info, api_version) diff --git a/homeassistant/components/esphome/manifest.json b/homeassistant/components/esphome/manifest.json index 97724a12203..9d42b7206e3 100644 --- a/homeassistant/components/esphome/manifest.json +++ b/homeassistant/components/esphome/manifest.json @@ -17,7 +17,7 @@ "mqtt": ["esphome/discover/#"], "quality_scale": "platinum", "requirements": [ - "aioesphomeapi==25.0.0", + "aioesphomeapi==25.3.1", "esphome-dashboard-api==1.2.3", "bleak-esphome==1.0.0" ], diff --git a/homeassistant/components/esphome/media_player.py b/homeassistant/components/esphome/media_player.py index ec9d61fb9e7..f7c5d7011f8 100644 --- a/homeassistant/components/esphome/media_player.py +++ b/homeassistant/components/esphome/media_player.py @@ -29,6 +29,7 @@ from homeassistant.core import callback from .entity import ( EsphomeEntity, convert_api_error_ha_error, + esphome_float_state_property, esphome_state_property, platform_async_setup_entry, ) @@ -79,7 +80,7 @@ class EsphomeMediaPlayer( return self._state.muted @property - @esphome_state_property + @esphome_float_state_property def volume_level(self) -> float | None: """Volume level of the media player (0..1).""" return self._state.volume diff --git a/homeassistant/components/esphome/number.py b/homeassistant/components/esphome/number.py index 1e588c8d35e..2d74dad1bcf 100644 --- a/homeassistant/components/esphome/number.py +++ b/homeassistant/components/esphome/number.py @@ -3,7 +3,6 @@ from __future__ import annotations from functools import partial -import math from aioesphomeapi import ( EntityInfo, @@ -19,7 +18,7 @@ from homeassistant.util.enum import try_parse_enum from .entity import ( EsphomeEntity, convert_api_error_ha_error, - esphome_state_property, + esphome_float_state_property, platform_async_setup_entry, ) from .enum_mapper import EsphomeEnumMapper @@ -57,13 +56,11 @@ class EsphomeNumber(EsphomeEntity[NumberInfo, NumberState], NumberEntity): self._attr_mode = NumberMode.AUTO @property - @esphome_state_property + @esphome_float_state_property def native_value(self) -> float | None: """Return the state of the entity.""" state = self._state - if state.missing_state or not math.isfinite(state.state): - return None - return state.state + return None if state.missing_state else state.state @convert_api_error_ha_error async def async_set_native_value(self, value: float) -> None: diff --git a/homeassistant/components/esphome/sensor.py b/homeassistant/components/esphome/sensor.py index 0742bebed28..670c92d291e 100644 --- a/homeassistant/components/esphome/sensor.py +++ b/homeassistant/components/esphome/sensor.py @@ -26,7 +26,7 @@ from 
homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import dt as dt_util from homeassistant.util.enum import try_parse_enum -from .entity import EsphomeEntity, esphome_state_property, platform_async_setup_entry +from .entity import EsphomeEntity, platform_async_setup_entry from .enum_mapper import EsphomeEnumMapper @@ -93,15 +93,16 @@ class EsphomeSensor(EsphomeEntity[SensorInfo, SensorState], SensorEntity): self._attr_state_class = _STATE_CLASSES.from_esphome(state_class) @property - @esphome_state_property def native_value(self) -> datetime | str | None: """Return the state of the entity.""" - state = self._state - if state.missing_state or not math.isfinite(state.state): + if not self._has_state or (state := self._state).missing_state: return None - if self._attr_device_class is SensorDeviceClass.TIMESTAMP: - return dt_util.utc_from_timestamp(state.state) - return f"{state.state:.{self._static_info.accuracy_decimals}f}" + state_float = state.state + if not math.isfinite(state_float): + return None + if self.device_class is SensorDeviceClass.TIMESTAMP: + return dt_util.utc_from_timestamp(state_float) + return f"{state_float:.{self._static_info.accuracy_decimals}f}" class EsphomeTextSensor(EsphomeEntity[TextSensorInfo, TextSensorState], SensorEntity): @@ -117,17 +118,17 @@ class EsphomeTextSensor(EsphomeEntity[TextSensorInfo, TextSensorState], SensorEn ) @property - @esphome_state_property def native_value(self) -> str | datetime | date | None: """Return the state of the entity.""" - state = self._state - if state.missing_state: + if not self._has_state or (state := self._state).missing_state: return None - if self._attr_device_class is SensorDeviceClass.TIMESTAMP: - return dt_util.parse_datetime(state.state) + state_str = state.state + device_class = self.device_class + if device_class is SensorDeviceClass.TIMESTAMP: + return dt_util.parse_datetime(state_str) if ( - self._attr_device_class is SensorDeviceClass.DATE - and (value := dt_util.parse_datetime(state.state)) is not None + device_class is SensorDeviceClass.DATE + and (value := dt_util.parse_datetime(state_str)) is not None ): return value.date() - return state.state + return state_str diff --git a/homeassistant/components/esphome/text.py b/homeassistant/components/esphome/text.py index f9dbbbcd853..36d77aac4a0 100644 --- a/homeassistant/components/esphome/text.py +++ b/homeassistant/components/esphome/text.py @@ -43,9 +43,7 @@ class EsphomeText(EsphomeEntity[TextInfo, TextState], TextEntity): def native_value(self) -> str | None: """Return the state of the entity.""" state = self._state - if state.missing_state: - return None - return state.state + return None if state.missing_state else state.state @convert_api_error_ha_error async def async_set_value(self, value: str) -> None: diff --git a/homeassistant/components/evohome/icons.json b/homeassistant/components/evohome/icons.json index cd0005e2546..54488440e60 100644 --- a/homeassistant/components/evohome/icons.json +++ b/homeassistant/components/evohome/icons.json @@ -1,9 +1,19 @@ { "services": { - "set_system_mode": "mdi:pencil", - "reset_system": "mdi:refresh", - "refresh_system": "mdi:refresh", - "set_zone_override": "mdi:motion-sensor", - "clear_zone_override": "mdi:motion-sensor-off" + "set_system_mode": { + "service": "mdi:pencil" + }, + "reset_system": { + "service": "mdi:refresh" + }, + "refresh_system": { + "service": "mdi:refresh" + }, + "set_zone_override": { + "service": "mdi:motion-sensor" + }, + "clear_zone_override": { + 
"service": "mdi:motion-sensor-off" + } } } diff --git a/homeassistant/components/ezviz/camera.py b/homeassistant/components/ezviz/camera.py index 455c41b385f..3c4a5f70ff4 100644 --- a/homeassistant/components/ezviz/camera.py +++ b/homeassistant/components/ezviz/camera.py @@ -112,7 +112,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( - SERVICE_WAKE_DEVICE, {}, "perform_wake_device" + SERVICE_WAKE_DEVICE, None, "perform_wake_device" ) diff --git a/homeassistant/components/ezviz/config_flow.py b/homeassistant/components/ezviz/config_flow.py index 2b47b120cf8..66425c675cc 100644 --- a/homeassistant/components/ezviz/config_flow.py +++ b/homeassistant/components/ezviz/config_flow.py @@ -319,7 +319,7 @@ class EzvizConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow for reauthentication with password.""" diff --git a/homeassistant/components/ezviz/icons.json b/homeassistant/components/ezviz/icons.json index 89b4747ed69..e4a2e49a22c 100644 --- a/homeassistant/components/ezviz/icons.json +++ b/homeassistant/components/ezviz/icons.json @@ -26,7 +26,11 @@ } }, "services": { - "set_alarm_detection_sensibility": "mdi:motion-sensor", - "wake_device": "mdi:sleep-off" + "set_alarm_detection_sensibility": { + "service": "mdi:motion-sensor" + }, + "wake_device": { + "service": "mdi:sleep-off" + } } } diff --git a/homeassistant/components/fan/__init__.py b/homeassistant/components/fan/__init__.py index 6ecc675a45e..5a15ece665a 100644 --- a/homeassistant/components/fan/__init__.py +++ b/homeassistant/components/fan/__init__.py @@ -139,11 +139,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: [FanEntityFeature.TURN_ON], ) component.async_register_entity_service( - SERVICE_TURN_OFF, {}, "async_turn_off", [FanEntityFeature.TURN_OFF] + SERVICE_TURN_OFF, None, "async_turn_off", [FanEntityFeature.TURN_OFF] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [FanEntityFeature.TURN_OFF, FanEntityFeature.TURN_ON], ) diff --git a/homeassistant/components/fan/icons.json b/homeassistant/components/fan/icons.json index 60edbce5f01..caf80775f80 100644 --- a/homeassistant/components/fan/icons.json +++ b/homeassistant/components/fan/icons.json @@ -20,14 +20,32 @@ } }, "services": { - "decrease_speed": "mdi:fan-minus", - "increase_speed": "mdi:fan-plus", - "oscillate": "mdi:arrow-oscillating", - "set_direction": "mdi:rotate-3d-variant", - "set_percentage": "mdi:fan", - "set_preset_mode": "mdi:fan-auto", - "toggle": "mdi:fan", - "turn_off": "mdi:fan-off", - "turn_on": "mdi:fan" + "decrease_speed": { + "service": "mdi:fan-minus" + }, + "increase_speed": { + "service": "mdi:fan-plus" + }, + "oscillate": { + "service": "mdi:arrow-oscillating" + }, + "set_direction": { + "service": "mdi:rotate-3d-variant" + }, + "set_percentage": { + "service": "mdi:fan" + }, + "set_preset_mode": { + "service": "mdi:fan-auto" + }, + "toggle": { + "service": "mdi:fan" + }, + "turn_off": { + "service": "mdi:fan-off" + }, + "turn_on": { + "service": "mdi:fan" + } } } diff --git a/homeassistant/components/feedreader/config_flow.py b/homeassistant/components/feedreader/config_flow.py index d367432ff8c..4553978a47e 100644 --- a/homeassistant/components/feedreader/config_flow.py +++ b/homeassistant/components/feedreader/config_flow.py @@ -115,10 +115,10 @@ class FeedReaderConfigFlow(ConfigFlow, 
domain=DOMAIN): options={CONF_MAX_ENTRIES: self._max_entries or DEFAULT_MAX_ENTRIES}, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle an import flow.""" - self._max_entries = user_input[CONF_MAX_ENTRIES] - return await self.async_step_user({CONF_URL: user_input[CONF_URL]}) + self._max_entries = import_data[CONF_MAX_ENTRIES] + return await self.async_step_user({CONF_URL: import_data[CONF_URL]}) async def async_step_reconfigure( self, _: dict[str, Any] | None = None diff --git a/homeassistant/components/ffmpeg/icons.json b/homeassistant/components/ffmpeg/icons.json index a23f024599c..780eb071af1 100644 --- a/homeassistant/components/ffmpeg/icons.json +++ b/homeassistant/components/ffmpeg/icons.json @@ -1,7 +1,13 @@ { "services": { - "restart": "mdi:restart", - "start": "mdi:play", - "stop": "mdi:stop" + "restart": { + "service": "mdi:restart" + }, + "start": { + "service": "mdi:play" + }, + "stop": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/file/__init__.py b/homeassistant/components/file/__init__.py index aa3e241cc81..0c9cfee5f4d 100644 --- a/homeassistant/components/file/__init__.py +++ b/homeassistant/components/file/__init__.py @@ -1,5 +1,8 @@ """The file component.""" +from copy import deepcopy +from typing import Any + from homeassistant.components.notify import migrate_notify_issue from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import ( @@ -84,7 +87,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a file component entry.""" - config = dict(entry.data) + config = {**entry.data, **entry.options} filepath: str = config[CONF_FILE_PATH] if filepath and not await hass.async_add_executor_job( hass.config.is_allowed_path, filepath @@ -98,6 +101,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await hass.config_entries.async_forward_entry_setups( entry, [Platform(entry.data[CONF_PLATFORM])] ) + entry.async_on_unload(entry.add_update_listener(update_listener)) if entry.data[CONF_PLATFORM] == Platform.NOTIFY and CONF_NAME in entry.data: # New notify entities are being setup through the config entry, # but during the deprecation period we want to keep the legacy notify platform, @@ -121,3 +125,29 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return await hass.config_entries.async_unload_platforms( entry, [entry.data[CONF_PLATFORM]] ) + + +async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: + """Handle options update.""" + await hass.config_entries.async_reload(entry.entry_id) + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate config entry.""" + if config_entry.version > 2: + # Downgraded from future + return False + + if config_entry.version < 2: + # Move optional fields from data to options in config entry + data: dict[str, Any] = deepcopy(dict(config_entry.data)) + options = {} + for key, value in config_entry.data.items(): + if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): + data.pop(key) + options[key] = value + + hass.config_entries.async_update_entry( + config_entry, version=2, data=data, options=options + ) + return True diff --git a/homeassistant/components/file/config_flow.py 
b/homeassistant/components/file/config_flow.py index 2d729473929..d74e36ce935 100644 --- a/homeassistant/components/file/config_flow.py +++ b/homeassistant/components/file/config_flow.py @@ -1,11 +1,18 @@ """Config flow for file integration.""" +from copy import deepcopy import os from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, + OptionsFlowWithConfigEntry, +) from homeassistant.const import ( CONF_FILE_PATH, CONF_FILENAME, @@ -15,6 +22,7 @@ from homeassistant.const import ( CONF_VALUE_TEMPLATE, Platform, ) +from homeassistant.core import callback from homeassistant.helpers.selector import ( BooleanSelector, BooleanSelectorConfig, @@ -31,27 +39,44 @@ BOOLEAN_SELECTOR = BooleanSelector(BooleanSelectorConfig()) TEMPLATE_SELECTOR = TemplateSelector(TemplateSelectorConfig()) TEXT_SELECTOR = TextSelector(TextSelectorConfig(type=TextSelectorType.TEXT)) -FILE_FLOW_SCHEMAS = { +FILE_OPTIONS_SCHEMAS = { Platform.SENSOR.value: vol.Schema( { - vol.Required(CONF_FILE_PATH): TEXT_SELECTOR, vol.Optional(CONF_VALUE_TEMPLATE): TEMPLATE_SELECTOR, vol.Optional(CONF_UNIT_OF_MEASUREMENT): TEXT_SELECTOR, } ), Platform.NOTIFY.value: vol.Schema( { - vol.Required(CONF_FILE_PATH): TEXT_SELECTOR, vol.Optional(CONF_TIMESTAMP, default=False): BOOLEAN_SELECTOR, } ), } +FILE_FLOW_SCHEMAS = { + Platform.SENSOR.value: vol.Schema( + { + vol.Required(CONF_FILE_PATH): TEXT_SELECTOR, + } + ).extend(FILE_OPTIONS_SCHEMAS[Platform.SENSOR.value].schema), + Platform.NOTIFY.value: vol.Schema( + { + vol.Required(CONF_FILE_PATH): TEXT_SELECTOR, + } + ).extend(FILE_OPTIONS_SCHEMAS[Platform.NOTIFY.value].schema), +} + class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a file config flow.""" - VERSION = 1 + VERSION = 2 + + @staticmethod + @callback + def async_get_options_flow(config_entry: ConfigEntry) -> OptionsFlow: + """Get the options flow for this handler.""" + return FileOptionsFlowHandler(config_entry) async def validate_file_path(self, file_path: str) -> bool: """Ensure the file path is valid.""" @@ -80,7 +105,13 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): errors[CONF_FILE_PATH] = "not_allowed" else: title = f"{DEFAULT_NAME} [{user_input[CONF_FILE_PATH]}]" - return self.async_create_entry(data=user_input, title=title) + data = deepcopy(user_input) + options = {} + for key, value in user_input.items(): + if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): + data.pop(key) + options[key] = value + return self.async_create_entry(data=data, title=title, options=options) return self.async_show_form( step_id=platform, data_schema=FILE_FLOW_SCHEMAS[platform], errors=errors @@ -98,11 +129,8 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): """Handle file sensor config flow.""" return await self._async_handle_step(Platform.SENSOR.value, user_input) - async def async_step_import( - self, import_data: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import `file`` config from configuration.yaml.""" - assert import_data is not None self._async_abort_entries_match(import_data) platform = import_data[CONF_PLATFORM] name: str = import_data.get(CONF_NAME, DEFAULT_NAME) @@ -114,4 +142,29 @@ class FileConfigFlowHandler(ConfigFlow, domain=DOMAIN): else: file_path = import_data[CONF_FILE_PATH] title = f"{name} [{file_path}]" - return 
self.async_create_entry(title=title, data=import_data) + data = deepcopy(import_data) + options = {} + for key, value in import_data.items(): + if key not in (CONF_FILE_PATH, CONF_PLATFORM, CONF_NAME): + data.pop(key) + options[key] = value + return self.async_create_entry(title=title, data=data, options=options) + + +class FileOptionsFlowHandler(OptionsFlowWithConfigEntry): + """Handle File options.""" + + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage File options.""" + if user_input: + return self.async_create_entry(data=user_input) + + platform = self.config_entry.data[CONF_PLATFORM] + return self.async_show_form( + step_id="init", + data_schema=self.add_suggested_values_to_schema( + FILE_OPTIONS_SCHEMAS[platform], self.config_entry.options or {} + ), + ) diff --git a/homeassistant/components/file/notify.py b/homeassistant/components/file/notify.py index 1516efd6d96..9411b7cf1a8 100644 --- a/homeassistant/components/file/notify.py +++ b/homeassistant/components/file/notify.py @@ -5,7 +5,6 @@ from __future__ import annotations from functools import partial import logging import os -from types import MappingProxyType from typing import Any, TextIO import voluptuous as vol @@ -109,7 +108,7 @@ async def async_setup_entry( ) -> None: """Set up notify entity.""" unique_id = entry.entry_id - async_add_entities([FileNotifyEntity(unique_id, entry.data)]) + async_add_entities([FileNotifyEntity(unique_id, {**entry.data, **entry.options})]) class FileNotifyEntity(NotifyEntity): @@ -118,7 +117,7 @@ class FileNotifyEntity(NotifyEntity): _attr_icon = FILE_ICON _attr_supported_features = NotifyEntityFeature.TITLE - def __init__(self, unique_id: str, config: MappingProxyType[str, Any]) -> None: + def __init__(self, unique_id: str, config: dict[str, Any]) -> None: """Initialize the service.""" self._file_path: str = config[CONF_FILE_PATH] self._add_timestamp: bool = config.get(CONF_TIMESTAMP, False) diff --git a/homeassistant/components/file/sensor.py b/homeassistant/components/file/sensor.py index fda0d14a6aa..e37a3df86a6 100644 --- a/homeassistant/components/file/sensor.py +++ b/homeassistant/components/file/sensor.py @@ -60,14 +60,15 @@ async def async_setup_entry( ) -> None: """Set up the file sensor.""" config = dict(entry.data) + options = dict(entry.options) file_path: str = config[CONF_FILE_PATH] unique_id: str = entry.entry_id name: str = config.get(CONF_NAME, DEFAULT_NAME) - unit: str | None = config.get(CONF_UNIT_OF_MEASUREMENT) + unit: str | None = options.get(CONF_UNIT_OF_MEASUREMENT) value_template: Template | None = None - if CONF_VALUE_TEMPLATE in config: - value_template = Template(config[CONF_VALUE_TEMPLATE], hass) + if CONF_VALUE_TEMPLATE in options: + value_template = Template(options[CONF_VALUE_TEMPLATE], hass) async_add_entities( [FileSensor(unique_id, name, file_path, unit, value_template)], True diff --git a/homeassistant/components/file/strings.json b/homeassistant/components/file/strings.json index 9d49e6300e9..60ebf451f78 100644 --- a/homeassistant/components/file/strings.json +++ b/homeassistant/components/file/strings.json @@ -42,6 +42,22 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, + "options": { + "step": { + "init": { + "data": { + "value_template": "[%key:component::file::config::step::sensor::data::value_template%]", + "unit_of_measurement": "[%key:component::file::config::step::sensor::data::unit_of_measurement%]", + "timestamp": 
"[%key:component::file::config::step::notify::data::timestamp%]" + }, + "data_description": { + "value_template": "[%key:component::file::config::step::sensor::data_description::value_template%]", + "unit_of_measurement": "[%key:component::file::config::step::sensor::data_description::unit_of_measurement%]", + "timestamp": "[%key:component::file::config::step::notify::data_description::timestamp%]" + } + } + } + }, "exceptions": { "dir_not_allowed": { "message": "Access to {filename} is not allowed." diff --git a/homeassistant/components/filesize/coordinator.py b/homeassistant/components/filesize/coordinator.py index 37fba19fb4e..c0dbb14555e 100644 --- a/homeassistant/components/filesize/coordinator.py +++ b/homeassistant/components/filesize/coordinator.py @@ -46,14 +46,15 @@ class FileSizeCoordinator(DataUpdateCoordinator[dict[str, int | float | datetime def _update(self) -> os.stat_result: """Fetch file information.""" - if not hasattr(self, "path"): - self.path = self._get_full_path() - try: return self.path.stat() except OSError as error: raise UpdateFailed(f"Can not retrieve file statistics {error}") from error + async def _async_setup(self) -> None: + """Set up path.""" + self.path = await self.hass.async_add_executor_job(self._get_full_path) + async def _async_update_data(self) -> dict[str, float | int | datetime]: """Fetch file information.""" statinfo = await self.hass.async_add_executor_job(self._update) diff --git a/homeassistant/components/filter/icons.json b/homeassistant/components/filter/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/filter/icons.json +++ b/homeassistant/components/filter/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/fints/sensor.py b/homeassistant/components/fints/sensor.py index 2f47fdc09eb..8a92850ad47 100644 --- a/homeassistant/components/fints/sensor.py +++ b/homeassistant/components/fints/sensor.py @@ -28,7 +28,7 @@ SCAN_INTERVAL = timedelta(hours=4) ICON = "mdi:currency-eur" -BankCredentials = namedtuple("BankCredentials", "blz login pin url") +BankCredentials = namedtuple("BankCredentials", "blz login pin url") # noqa: PYI024 CONF_BIN = "bank_identification_number" CONF_ACCOUNTS = "accounts" diff --git a/homeassistant/components/fireservicerota/config_flow.py b/homeassistant/components/fireservicerota/config_flow.py index afaef17c5a6..7b7248d44a1 100644 --- a/homeassistant/components/fireservicerota/config_flow.py +++ b/homeassistant/components/fireservicerota/config_flow.py @@ -27,18 +27,20 @@ class FireServiceRotaFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize config flow.""" self.api = None self._base_url = None self._username = None self._password = None - self._existing_entry = None - self._description_placeholders = None + self._existing_entry: dict[str, Any] | None = None + self._description_placeholders: dict[str, str] | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is None: return self._show_setup_form(user_input, errors) diff --git a/homeassistant/components/firmata/config_flow.py b/homeassistant/components/firmata/config_flow.py index 571df351b25..4c0f800fff4 100644 --- 
a/homeassistant/components/firmata/config_flow.py +++ b/homeassistant/components/firmata/config_flow.py @@ -19,9 +19,7 @@ class FirmataFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a firmata board as a config entry. This flow is triggered by `async_setup` for configured boards. @@ -30,14 +28,14 @@ class FirmataFlowHandler(ConfigFlow, domain=DOMAIN): config entry yet (based on entry_id). It validates a connection and then adds the entry. """ - name = f"serial-{import_config[CONF_SERIAL_PORT]}" - import_config[CONF_NAME] = name + name = f"serial-{import_data[CONF_SERIAL_PORT]}" + import_data[CONF_NAME] = name # Connect to the board to verify connection and then shutdown # If either fail then we cannot continue _LOGGER.debug("Connecting to Firmata board %s to test connection", name) try: - api = await get_board(import_config) + api = await get_board(import_data) await api.shutdown() except RuntimeError as err: _LOGGER.error("Error connecting to PyMata board %s: %s", name, err) @@ -54,6 +52,4 @@ class FirmataFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="cannot_connect") _LOGGER.debug("Connection test to Firmata board %s successful", name) - return self.async_create_entry( - title=import_config[CONF_NAME], data=import_config - ) + return self.async_create_entry(title=import_data[CONF_NAME], data=import_data) diff --git a/homeassistant/components/fitbit/config_flow.py b/homeassistant/components/fitbit/config_flow.py index 0ae1973b5fb..eff4ba37773 100644 --- a/homeassistant/components/fitbit/config_flow.py +++ b/homeassistant/components/fitbit/config_flow.py @@ -93,6 +93,6 @@ class OAuth2FlowHandler( self._abort_if_unique_id_configured() return self.async_create_entry(title=profile.display_name, data=data) - async def async_step_import(self, data: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from YAML.""" - return await self.async_oauth_create_entry(data) + return await self.async_oauth_create_entry(import_data) diff --git a/homeassistant/components/flic/manifest.json b/homeassistant/components/flic/manifest.json index 8fc146ded6a..0442e4a7b7b 100644 --- a/homeassistant/components/flic/manifest.json +++ b/homeassistant/components/flic/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/flic", "iot_class": "local_push", "loggers": ["pyflic"], - "requirements": ["pyflic==2.0.3"] + "requirements": ["pyflic==2.0.4"] } diff --git a/homeassistant/components/flick_electric/config_flow.py b/homeassistant/components/flick_electric/config_flow.py index 7fe5fda3f4e..8a2455b9d14 100644 --- a/homeassistant/components/flick_electric/config_flow.py +++ b/homeassistant/components/flick_electric/config_flow.py @@ -2,12 +2,13 @@ import asyncio import logging +from typing import Any from pyflick.authentication import AuthException, SimpleFlickAuth from pyflick.const import DEFAULT_CLIENT_ID, DEFAULT_CLIENT_SECRET import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_CLIENT_ID, CONF_CLIENT_SECRET, @@ -55,7 +56,9 @@ class FlickConfigFlow(ConfigFlow, domain=DOMAIN): return token is not None - async def async_step_user(self, 
user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle gathering login info.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/flipr/manifest.json b/homeassistant/components/flipr/manifest.json index 898cd640349..1f9b04e3d57 100644 --- a/homeassistant/components/flipr/manifest.json +++ b/homeassistant/components/flipr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/flipr", "iot_class": "cloud_polling", "loggers": ["flipr_api"], - "requirements": ["flipr-api==1.5.1"] + "requirements": ["flipr-api==1.6.0"] } diff --git a/homeassistant/components/flo/config_flow.py b/homeassistant/components/flo/config_flow.py index ec92b60c740..bd524c590fa 100644 --- a/homeassistant/components/flo/config_flow.py +++ b/homeassistant/components/flo/config_flow.py @@ -1,10 +1,12 @@ """Config flow for flo integration.""" +from typing import Any + from aioflo import async_get_api from aioflo.errors import RequestError import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -36,7 +38,9 @@ class FloConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/flo/icons.json b/homeassistant/components/flo/icons.json index 3164781c1b4..4bd0380c56c 100644 --- a/homeassistant/components/flo/icons.json +++ b/homeassistant/components/flo/icons.json @@ -10,9 +10,17 @@ } }, "services": { - "set_sleep_mode": "mdi:sleep", - "set_away_mode": "mdi:home-off", - "set_home_mode": "mdi:home", - "run_health_test": "mdi:heart-flash" + "set_sleep_mode": { + "service": "mdi:sleep" + }, + "set_away_mode": { + "service": "mdi:home-off" + }, + "set_home_mode": { + "service": "mdi:home" + }, + "run_health_test": { + "service": "mdi:heart-flash" + } } } diff --git a/homeassistant/components/flo/switch.py b/homeassistant/components/flo/switch.py index ab201dfb906..f0460839837 100644 --- a/homeassistant/components/flo/switch.py +++ b/homeassistant/components/flo/switch.py @@ -42,13 +42,13 @@ async def async_setup_entry( platform = entity_platform.async_get_current_platform() platform.async_register_entity_service( - SERVICE_SET_AWAY_MODE, {}, "async_set_mode_away" + SERVICE_SET_AWAY_MODE, None, "async_set_mode_away" ) platform.async_register_entity_service( - SERVICE_SET_HOME_MODE, {}, "async_set_mode_home" + SERVICE_SET_HOME_MODE, None, "async_set_mode_home" ) platform.async_register_entity_service( - SERVICE_RUN_HEALTH_TEST, {}, "async_run_health_test" + SERVICE_RUN_HEALTH_TEST, None, "async_run_health_test" ) platform.async_register_entity_service( SERVICE_SET_SLEEP_MODE, diff --git a/homeassistant/components/flume/icons.json b/homeassistant/components/flume/icons.json index 631c0645ed3..90830943689 100644 --- a/homeassistant/components/flume/icons.json +++ b/homeassistant/components/flume/icons.json @@ -10,6 +10,8 @@ } }, "services": { - "list_notifications": "mdi:bell" + "list_notifications": { + "service": "mdi:bell" + } } } diff --git a/homeassistant/components/flux_led/icons.json 
b/homeassistant/components/flux_led/icons.json index 873fcd7c441..07c27869ff7 100644 --- a/homeassistant/components/flux_led/icons.json +++ b/homeassistant/components/flux_led/icons.json @@ -54,8 +54,14 @@ } }, "services": { - "set_custom_effect": "mdi:creation", - "set_zones": "mdi:texture-box", - "set_music_mode": "mdi:music" + "set_custom_effect": { + "service": "mdi:creation" + }, + "set_zones": { + "service": "mdi:texture-box" + }, + "set_music_mode": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/forked_daapd/config_flow.py b/homeassistant/components/forked_daapd/config_flow.py index 7edf25a2595..1f76fe21bad 100644 --- a/homeassistant/components/forked_daapd/config_flow.py +++ b/homeassistant/components/forked_daapd/config_flow.py @@ -2,6 +2,7 @@ from contextlib import suppress import logging +from typing import Any from pyforked_daapd import ForkedDaapdAPI import voluptuous as vol @@ -135,7 +136,9 @@ class ForkedDaapdFlowHandler(ConfigFlow, domain=DOMAIN): ) return validate_result - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a forked-daapd config flow start. Manage device specific parameters. diff --git a/homeassistant/components/foscam/config_flow.py b/homeassistant/components/foscam/config_flow.py index 8a005f19f09..19c19a1a5f5 100644 --- a/homeassistant/components/foscam/config_flow.py +++ b/homeassistant/components/foscam/config_flow.py @@ -1,5 +1,7 @@ """Config flow for foscam integration.""" +from typing import Any + from libpyfoscam import FoscamCamera from libpyfoscam.foscam import ( ERROR_FOSCAM_AUTH, @@ -8,7 +10,7 @@ from libpyfoscam.foscam import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_HOST, CONF_NAME, @@ -90,7 +92,9 @@ class FoscamConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=name, data=data) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/foscam/icons.json b/homeassistant/components/foscam/icons.json index 0c7dba9a4df..437575024d1 100644 --- a/homeassistant/components/foscam/icons.json +++ b/homeassistant/components/foscam/icons.json @@ -1,6 +1,10 @@ { "services": { - "ptz": "mdi:pan", - "ptz_preset": "mdi:target-variant" + "ptz": { + "service": "mdi:pan" + }, + "ptz_preset": { + "service": "mdi:target-variant" + } } } diff --git a/homeassistant/components/foursquare/icons.json b/homeassistant/components/foursquare/icons.json index cf60ed9f247..8e2b4e91d5f 100644 --- a/homeassistant/components/foursquare/icons.json +++ b/homeassistant/components/foursquare/icons.json @@ -1,5 +1,7 @@ { "services": { - "checkin": "mdi:map-marker" + "checkin": { + "service": "mdi:map-marker" + } } } diff --git a/homeassistant/components/freebox/icons.json b/homeassistant/components/freebox/icons.json index 81361d2c990..f4184f0673e 100644 --- a/homeassistant/components/freebox/icons.json +++ b/homeassistant/components/freebox/icons.json @@ -1,5 +1,7 @@ { "services": { - "reboot": "mdi:restart" + "reboot": { + "service": "mdi:restart" + } } } diff --git a/homeassistant/components/freedompro/config_flow.py b/homeassistant/components/freedompro/config_flow.py index 
f1dd9dbbf14..f986cd05904 100644 --- a/homeassistant/components/freedompro/config_flow.py +++ b/homeassistant/components/freedompro/config_flow.py @@ -1,9 +1,11 @@ """Config flow to configure Freedompro.""" +from typing import Any + from pyfreedompro import get_list import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -45,7 +47,9 @@ class FreedomProConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Show the setup form to the user.""" if user_input is None: return self.async_show_form( diff --git a/homeassistant/components/fritz/coordinator.py b/homeassistant/components/fritz/coordinator.py index e97b988c391..13c442a1ace 100644 --- a/homeassistant/components/fritz/coordinator.py +++ b/homeassistant/components/fritz/coordinator.py @@ -568,8 +568,7 @@ class FritzBoxTools(DataUpdateCoordinator[UpdateCoordinatorDataType]): self.fritz_hosts.get_mesh_topology ) ): - # pylint: disable-next=broad-exception-raised - raise Exception("Mesh supported but empty topology reported") + raise Exception("Mesh supported but empty topology reported") # noqa: TRY002 except FritzActionError: self.mesh_role = MeshRoles.SLAVE # Avoid duplicating device trackers diff --git a/homeassistant/components/fritz/icons.json b/homeassistant/components/fritz/icons.json index d2154dc7232..481568a4c2c 100644 --- a/homeassistant/components/fritz/icons.json +++ b/homeassistant/components/fritz/icons.json @@ -51,9 +51,17 @@ } }, "services": { - "reconnect": "mdi:connection", - "reboot": "mdi:refresh", - "cleanup": "mdi:broom", - "set_guest_wifi_password": "mdi:form-textbox-password" + "reconnect": { + "service": "mdi:connection" + }, + "reboot": { + "service": "mdi:refresh" + }, + "cleanup": { + "service": "mdi:broom" + }, + "set_guest_wifi_password": { + "service": "mdi:form-textbox-password" + } } } diff --git a/homeassistant/components/fritz/strings.json b/homeassistant/components/fritz/strings.json index 3b6c60ed48f..6be393cc636 100644 --- a/homeassistant/components/fritz/strings.json +++ b/homeassistant/components/fritz/strings.json @@ -7,7 +7,8 @@ "description": "Discovered FRITZ!Box: {name}\n\nSet up FRITZ!Box Tools to control your {name}", "data": { "username": "[%key:common::config_flow::data::username%]", - "password": "[%key:common::config_flow::data::password%]" + "password": "[%key:common::config_flow::data::password%]", + "ssl": "[%key:common::config_flow::data::ssl%]" } }, "reauth_confirm": { diff --git a/homeassistant/components/fritzbox/light.py b/homeassistant/components/fritzbox/light.py index 689e64c709a..c19d7a8600d 100644 --- a/homeassistant/components/fritzbox/light.py +++ b/homeassistant/components/fritzbox/light.py @@ -17,11 +17,9 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import FritzboxDataUpdateCoordinator, FritzBoxDeviceEntity -from .const import COLOR_MODE, COLOR_TEMP_MODE, LOGGER +from .const import COLOR_MODE, LOGGER from .coordinator import FritzboxConfigEntry -SUPPORTED_COLOR_MODES = {ColorMode.COLOR_TEMP, ColorMode.HS} - async def async_setup_entry( hass: HomeAssistant, @@ -61,6 +59,12 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): super().__init__(coordinator, ain, None) self._supported_hs: dict[int, list[int]] = {} + self._attr_supported_color_modes = {ColorMode.ONOFF} + if self.data.has_color: + self._attr_supported_color_modes = {ColorMode.COLOR_TEMP, ColorMode.HS} + elif self.data.has_level: + self._attr_supported_color_modes = {ColorMode.BRIGHTNESS} + @property def is_on(self) -> bool: """If the light is currently on or off.""" @@ -72,22 +76,16 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): return self.data.level # type: ignore [no-any-return] @property - def hs_color(self) -> tuple[float, float] | None: + def hs_color(self) -> tuple[float, float]: """Return the hs color value.""" - if self.data.color_mode != COLOR_MODE: - return None - hue = self.data.hue saturation = self.data.saturation return (hue, float(saturation) * 100.0 / 255.0) @property - def color_temp_kelvin(self) -> int | None: + def color_temp_kelvin(self) -> int: """Return the CT color value.""" - if self.data.color_mode != COLOR_TEMP_MODE: - return None - return self.data.color_temp # type: ignore [no-any-return] @property @@ -101,15 +99,6 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): return ColorMode.BRIGHTNESS return ColorMode.ONOFF - @property - def supported_color_modes(self) -> set[ColorMode]: - """Flag supported color modes.""" - if self.data.has_color: - return SUPPORTED_COLOR_MODES - if self.data.has_level: - return {ColorMode.BRIGHTNESS} - return {ColorMode.ONOFF} - async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" if kwargs.get(ATTR_BRIGHTNESS) is not None: @@ -163,12 +152,14 @@ class FritzboxLight(FritzBoxDeviceEntity, LightEntity): async def async_added_to_hass(self) -> None: """Get light attributes from device after entity is added to hass.""" await super().async_added_to_hass() - supported_colors = await self.hass.async_add_executor_job( - self.coordinator.data.devices[self.ain].get_colors - ) - supported_color_temps = await self.hass.async_add_executor_job( - self.coordinator.data.devices[self.ain].get_color_temps - ) + + def _get_color_data() -> tuple[dict, list]: + return (self.data.get_colors(), self.data.get_color_temps()) + + ( + supported_colors, + supported_color_temps, + ) = await self.hass.async_add_executor_job(_get_color_data) if supported_color_temps: # only available for color bulbs diff --git a/homeassistant/components/fronius/sensor.py b/homeassistant/components/fronius/sensor.py index 31f080c1f51..c8a840b1c2c 100644 --- a/homeassistant/components/fronius/sensor.py +++ b/homeassistant/components/fronius/sensor.py @@ -14,7 +14,6 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import ( PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, EntityCategory, UnitOfApparentPower, UnitOfElectricCurrent, @@ -22,6 +21,7 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, UnitOfTemperature, ) from homeassistant.core import HomeAssistant, callback @@ -381,28 +381,28 @@ METER_ENTITY_DESCRIPTIONS: list[FroniusSensorEntityDescription] = [ ), FroniusSensorEntityDescription( key="power_reactive_phase_1", - 
native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), FroniusSensorEntityDescription( key="power_reactive_phase_2", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), FroniusSensorEntityDescription( key="power_reactive_phase_3", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), FroniusSensorEntityDescription( key="power_reactive", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, diff --git a/homeassistant/components/frontend/icons.json b/homeassistant/components/frontend/icons.json index 9fbe4d5b9b0..b4bcdef6194 100644 --- a/homeassistant/components/frontend/icons.json +++ b/homeassistant/components/frontend/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_theme": "mdi:palette-swatch", - "reload_themes": "mdi:reload" + "set_theme": { + "service": "mdi:palette-swatch" + }, + "reload_themes": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/frontend/manifest.json b/homeassistant/components/frontend/manifest.json index 035b087e481..fbdafe6025d 100644 --- a/homeassistant/components/frontend/manifest.json +++ b/homeassistant/components/frontend/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/frontend", "integration_type": "system", "quality_scale": "internal", - "requirements": ["home-assistant-frontend==20240809.0"] + "requirements": ["home-assistant-frontend==20240904.0"] } diff --git a/homeassistant/components/frontier_silicon/config_flow.py b/homeassistant/components/frontier_silicon/config_flow.py index 103323ff575..8a3c5fe086f 100644 --- a/homeassistant/components/frontier_silicon/config_flow.py +++ b/homeassistant/components/frontier_silicon/config_flow.py @@ -172,9 +172,11 @@ class FrontierSiliconConfigFlow(ConfigFlow, domain=DOMAIN): step_id="confirm", description_placeholders={"name": self._name} ) - async def async_step_reauth(self, config: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - self._webfsapi_url = config[CONF_WEBFSAPI_URL] + self._webfsapi_url = entry_data[CONF_WEBFSAPI_URL] self._reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] diff --git a/homeassistant/components/fujitsu_fglair/__init__.py b/homeassistant/components/fujitsu_fglair/__init__.py new file mode 100644 index 00000000000..bd891f05b8d --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/__init__.py @@ -0,0 +1,49 @@ +"""The Fujitsu HVAC (based on Ayla IOT) integration.""" + +from __future__ import annotations + +from contextlib import suppress + +from ayla_iot_unofficial import new_ayla_api + +from homeassistant.config_entries import ConfigEntry +from 
homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import aiohttp_client + +from .const import API_TIMEOUT, CONF_EUROPE, FGLAIR_APP_ID, FGLAIR_APP_SECRET +from .coordinator import FGLairCoordinator + +PLATFORMS: list[Platform] = [Platform.CLIMATE] + +type FGLairConfigEntry = ConfigEntry[FGLairCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: FGLairConfigEntry) -> bool: + """Set up Fujitsu HVAC (based on Ayla IOT) from a config entry.""" + api = new_ayla_api( + entry.data[CONF_USERNAME], + entry.data[CONF_PASSWORD], + FGLAIR_APP_ID, + FGLAIR_APP_SECRET, + europe=entry.data[CONF_EUROPE], + websession=aiohttp_client.async_get_clientsession(hass), + timeout=API_TIMEOUT, + ) + + coordinator = FGLairCoordinator(hass, api) + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: FGLairConfigEntry) -> bool: + """Unload a config entry.""" + unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + with suppress(TimeoutError): + await entry.runtime_data.api.async_sign_out() + + return unload_ok diff --git a/homeassistant/components/fujitsu_fglair/climate.py b/homeassistant/components/fujitsu_fglair/climate.py new file mode 100644 index 00000000000..726096eab1a --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/climate.py @@ -0,0 +1,173 @@ +"""Support for Fujitsu HVAC devices that use the Ayla Iot platform.""" + +from typing import Any + +from ayla_iot_unofficial.fujitsu_hvac import ( + Capability, + FanSpeed, + FujitsuHVAC, + OpMode, + SwingMode, +) + +from homeassistant.components.climate import ( + FAN_AUTO, + FAN_HIGH, + FAN_LOW, + FAN_MEDIUM, + SWING_BOTH, + SWING_HORIZONTAL, + SWING_OFF, + SWING_VERTICAL, + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, PRECISION_HALVES, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . 
import FGLairConfigEntry +from .const import DOMAIN +from .coordinator import FGLairCoordinator + +HA_TO_FUJI_FAN = { + FAN_LOW: FanSpeed.LOW, + FAN_MEDIUM: FanSpeed.MEDIUM, + FAN_HIGH: FanSpeed.HIGH, + FAN_AUTO: FanSpeed.AUTO, +} +FUJI_TO_HA_FAN = {value: key for key, value in HA_TO_FUJI_FAN.items()} + +HA_TO_FUJI_HVAC = { + HVACMode.OFF: OpMode.OFF, + HVACMode.HEAT: OpMode.HEAT, + HVACMode.COOL: OpMode.COOL, + HVACMode.HEAT_COOL: OpMode.AUTO, + HVACMode.DRY: OpMode.DRY, + HVACMode.FAN_ONLY: OpMode.FAN, +} +FUJI_TO_HA_HVAC = {value: key for key, value in HA_TO_FUJI_HVAC.items()} + +HA_TO_FUJI_SWING = { + SWING_OFF: SwingMode.OFF, + SWING_VERTICAL: SwingMode.SWING_VERTICAL, + SWING_HORIZONTAL: SwingMode.SWING_HORIZONTAL, + SWING_BOTH: SwingMode.SWING_BOTH, +} +FUJI_TO_HA_SWING = {value: key for key, value in HA_TO_FUJI_SWING.items()} + + +async def async_setup_entry( + hass: HomeAssistant, + entry: FGLairConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up one Fujitsu HVAC device.""" + async_add_entities( + FGLairDevice(entry.runtime_data, device) + for device in entry.runtime_data.data.values() + ) + + +class FGLairDevice(CoordinatorEntity[FGLairCoordinator], ClimateEntity): + """Represent a Fujitsu HVAC device.""" + + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_precision = PRECISION_HALVES + _attr_target_temperature_step = 0.5 + _attr_has_entity_name = True + _attr_name = None + + _enable_turn_on_off_backwards_compatibility: bool = False + + def __init__(self, coordinator: FGLairCoordinator, device: FujitsuHVAC) -> None: + """Store the representation of the device and set the static attributes.""" + super().__init__(coordinator, context=device.device_serial_number) + + self._attr_unique_id = device.device_serial_number + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, device.device_serial_number)}, + name=device.device_name, + manufacturer="Fujitsu", + model=device.property_values["model_name"], + serial_number=device.device_serial_number, + sw_version=device.property_values["mcu_firmware_version"], + ) + + self._attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE + | ClimateEntityFeature.TURN_ON + | ClimateEntityFeature.TURN_OFF + ) + if device.has_capability(Capability.OP_FAN): + self._attr_supported_features |= ClimateEntityFeature.FAN_MODE + + if device.has_capability(Capability.SWING_HORIZONTAL) or device.has_capability( + Capability.SWING_VERTICAL + ): + self._attr_supported_features |= ClimateEntityFeature.SWING_MODE + self._set_attr() + + @property + def device(self) -> FujitsuHVAC: + """Return the device object from the coordinator data.""" + return self.coordinator.data[self.coordinator_context] + + @property + def available(self) -> bool: + """Return if the device is available.""" + return super().available and self.coordinator_context in self.coordinator.data + + async def async_set_fan_mode(self, fan_mode: str) -> None: + """Set Fan mode.""" + await self.device.async_set_fan_speed(HA_TO_FUJI_FAN[fan_mode]) + await self.coordinator.async_request_refresh() + + async def async_set_hvac_mode(self, hvac_mode: HVACMode) -> None: + """Set HVAC mode.""" + await self.device.async_set_op_mode(HA_TO_FUJI_HVAC[hvac_mode]) + await self.coordinator.async_request_refresh() + + async def async_set_swing_mode(self, swing_mode: str) -> None: + """Set swing mode.""" + await self.device.async_set_swing_mode(HA_TO_FUJI_SWING[swing_mode]) + await self.coordinator.async_request_refresh() + + async def 
async_set_temperature(self, **kwargs: Any) -> None: + """Set target temperature.""" + if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: + return + await self.device.async_set_set_temp(temperature) + await self.coordinator.async_request_refresh() + + def _set_attr(self) -> None: + if self.coordinator_context in self.coordinator.data: + self._attr_fan_mode = FUJI_TO_HA_FAN.get(self.device.fan_speed) + self._attr_fan_modes = [ + FUJI_TO_HA_FAN[mode] + for mode in self.device.supported_fan_speeds + if mode in FUJI_TO_HA_FAN + ] + self._attr_hvac_mode = FUJI_TO_HA_HVAC.get(self.device.op_mode) + self._attr_hvac_modes = [ + FUJI_TO_HA_HVAC[mode] + for mode in self.device.supported_op_modes + if mode in FUJI_TO_HA_HVAC + ] + self._attr_swing_mode = FUJI_TO_HA_SWING.get(self.device.swing_mode) + self._attr_swing_modes = [ + FUJI_TO_HA_SWING[mode] + for mode in self.device.supported_swing_modes + if mode in FUJI_TO_HA_SWING + ] + self._attr_min_temp = self.device.temperature_range[0] + self._attr_max_temp = self.device.temperature_range[1] + self._attr_current_temperature = self.device.sensed_temp + self._attr_target_temperature = self.device.set_temp + + def _handle_coordinator_update(self) -> None: + self._set_attr() + super()._handle_coordinator_update() diff --git a/homeassistant/components/fujitsu_fglair/config_flow.py b/homeassistant/components/fujitsu_fglair/config_flow.py new file mode 100644 index 00000000000..5021e495656 --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/config_flow.py @@ -0,0 +1,119 @@ +"""Config flow for Fujitsu HVAC (based on Ayla IOT) integration.""" + +from collections.abc import Mapping +import logging +from typing import Any + +from ayla_iot_unofficial import AylaAuthError, new_ayla_api +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers import aiohttp_client + +from .const import API_TIMEOUT, CONF_EUROPE, DOMAIN, FGLAIR_APP_ID, FGLAIR_APP_SECRET + +_LOGGER = logging.getLogger(__name__) + + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + vol.Required(CONF_EUROPE): bool, + } +) +STEP_REAUTH_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_PASSWORD): str, + } +) + + +class FGLairConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Fujitsu HVAC (based on Ayla IOT).""" + + _reauth_entry: ConfigEntry | None = None + + async def _async_validate_credentials( + self, user_input: dict[str, Any] + ) -> dict[str, str]: + errors: dict[str, str] = {} + api = new_ayla_api( + user_input[CONF_USERNAME], + user_input[CONF_PASSWORD], + FGLAIR_APP_ID, + FGLAIR_APP_SECRET, + europe=user_input[CONF_EUROPE], + websession=aiohttp_client.async_get_clientsession(self.hass), + timeout=API_TIMEOUT, + ) + try: + await api.async_sign_in() + except TimeoutError: + errors["base"] = "cannot_connect" + except AylaAuthError: + errors["base"] = "invalid_auth" + except Exception: # pylint: disable=broad-except + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + + return errors + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input: + await self.async_set_unique_id(user_input[CONF_USERNAME].lower()) + self._abort_if_unique_id_configured() + + errors = await self._async_validate_credentials(user_input) + if 
not errors: + return self.async_create_entry( + title=f"FGLair ({user_input[CONF_USERNAME]})", + data=user_input, + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Perform reauth upon an API authentication error.""" + self._reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Dialog that informs the user that reauth is required.""" + errors: dict[str, str] = {} + assert self._reauth_entry + + if user_input: + reauth_data = { + **self._reauth_entry.data, + CONF_PASSWORD: user_input[CONF_PASSWORD], + } + errors = await self._async_validate_credentials(reauth_data) + + if len(errors) == 0: + return self.async_update_reload_and_abort( + self._reauth_entry, data=reauth_data + ) + + return self.async_show_form( + step_id="reauth_confirm", + data_schema=STEP_REAUTH_DATA_SCHEMA, + description_placeholders={ + CONF_USERNAME: self._reauth_entry.data[CONF_USERNAME], + **self.context["title_placeholders"], + }, + errors=errors, + ) diff --git a/homeassistant/components/fujitsu_fglair/const.py b/homeassistant/components/fujitsu_fglair/const.py new file mode 100644 index 00000000000..a9d485281a3 --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/const.py @@ -0,0 +1,15 @@ +"""Constants for the Fujitsu HVAC (based on Ayla IOT) integration.""" + +from datetime import timedelta + +from ayla_iot_unofficial.fujitsu_consts import ( # noqa: F401 + FGLAIR_APP_ID, + FGLAIR_APP_SECRET, +) + +API_TIMEOUT = 10 +API_REFRESH = timedelta(minutes=5) + +DOMAIN = "fujitsu_fglair" + +CONF_EUROPE = "is_europe" diff --git a/homeassistant/components/fujitsu_fglair/coordinator.py b/homeassistant/components/fujitsu_fglair/coordinator.py new file mode 100644 index 00000000000..eac3cfd6ce5 --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/coordinator.py @@ -0,0 +1,63 @@ +"""Coordinator for Fujitsu HVAC integration.""" + +import logging + +from ayla_iot_unofficial import AylaApi, AylaAuthError +from ayla_iot_unofficial.fujitsu_hvac import FujitsuHVAC + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator + +from .const import API_REFRESH + +_LOGGER = logging.getLogger(__name__) + + +class FGLairCoordinator(DataUpdateCoordinator[dict[str, FujitsuHVAC]]): + """Coordinator for Fujitsu HVAC integration.""" + + def __init__(self, hass: HomeAssistant, api: AylaApi) -> None: + """Initialize coordinator for Fujitsu HVAC integration.""" + super().__init__( + hass, + _LOGGER, + name="Fujitsu HVAC data", + update_interval=API_REFRESH, + ) + self.api = api + + async def _async_setup(self) -> None: + try: + await self.api.async_sign_in() + except AylaAuthError as e: + raise ConfigEntryAuthFailed("Credentials expired for Ayla IoT API") from e + + async def _async_update_data(self) -> dict[str, FujitsuHVAC]: + """Fetch data from api endpoint.""" + listening_entities = set(self.async_contexts()) + try: + if self.api.token_expired: + await self.api.async_sign_in() + + if self.api.token_expiring_soon: + await self.api.async_refresh_auth() + + devices = await self.api.async_get_devices() + except AylaAuthError as e: + raise 
ConfigEntryAuthFailed("Credentials expired for Ayla IoT API") from e + + if not listening_entities: + devices = [dev for dev in devices if isinstance(dev, FujitsuHVAC)] + else: + devices = [ + dev for dev in devices if dev.device_serial_number in listening_entities + ] + + try: + for dev in devices: + await dev.async_update() + except AylaAuthError as e: + raise ConfigEntryAuthFailed("Credentials expired for Ayla IoT API") from e + + return {d.device_serial_number: d for d in devices} diff --git a/homeassistant/components/fujitsu_fglair/manifest.json b/homeassistant/components/fujitsu_fglair/manifest.json new file mode 100644 index 00000000000..9286f7c24d9 --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/manifest.json @@ -0,0 +1,9 @@ +{ + "domain": "fujitsu_fglair", + "name": "FGLair", + "codeowners": ["@crevetor"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/fujitsu_fglair", + "iot_class": "cloud_polling", + "requirements": ["ayla-iot-unofficial==1.3.1"] +} diff --git a/homeassistant/components/fujitsu_fglair/strings.json b/homeassistant/components/fujitsu_fglair/strings.json new file mode 100644 index 00000000000..8f7d775d7e4 --- /dev/null +++ b/homeassistant/components/fujitsu_fglair/strings.json @@ -0,0 +1,33 @@ +{ + "config": { + "step": { + "user": { + "title": "Enter your FGLair credentials", + "data": { + "is_europe": "Use European servers", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + }, + "data_description": { + "is_europe": "Allows the user to choose whether to use European servers, since the API uses different endpoint URLs for European and non-European users" + } + }, + "reauth_confirm": { + "title": "[%key:common::config_flow::title::reauth%]", + "description": "Please re-enter the password for {username}:", + "data": { + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]" + } + } +} diff --git a/homeassistant/components/fully_kiosk/icons.json b/homeassistant/components/fully_kiosk/icons.json index 760698f7ac8..0166679abe2 100644 --- a/homeassistant/components/fully_kiosk/icons.json +++ b/homeassistant/components/fully_kiosk/icons.json @@ -1,7 +1,13 @@ { "services": { - "load_url": "mdi:link", - "set_config": "mdi:cog", - "start_application": "mdi:rocket-launch" + "load_url": { + "service": "mdi:link" + }, + "set_config": { + "service": "mdi:cog" + }, + "start_application": { + "service": "mdi:rocket-launch" + } } } diff --git a/homeassistant/components/fyta/__init__.py b/homeassistant/components/fyta/__init__.py index b666c5a1f52..efbb1453456 100644 --- a/homeassistant/components/fyta/__init__.py +++ b/homeassistant/components/fyta/__init__.py @@ -4,7 +4,6 @@ from __future__ import annotations from datetime import datetime import logging -from typing import Any from fyta_cli.fyta_connector import FytaConnector @@ -73,11 +72,11 @@ async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> fyta = FytaConnector( config_entry.data[CONF_USERNAME], config_entry.data[CONF_PASSWORD] ) - credentials: dict[str, Any] =
await fyta.login() + credentials = await fyta.login() await fyta.client.close() - new[CONF_ACCESS_TOKEN] = credentials[CONF_ACCESS_TOKEN] - new[CONF_EXPIRATION] = credentials[CONF_EXPIRATION].isoformat() + new[CONF_ACCESS_TOKEN] = credentials.access_token + new[CONF_EXPIRATION] = credentials.expiration.isoformat() hass.config_entries.async_update_entry( config_entry, data=new, minor_version=2, version=1 diff --git a/homeassistant/components/fyta/config_flow.py b/homeassistant/components/fyta/config_flow.py index 4cb8bddbf10..f2b5163c9db 100644 --- a/homeassistant/components/fyta/config_flow.py +++ b/homeassistant/components/fyta/config_flow.py @@ -12,10 +12,11 @@ from fyta_cli.fyta_exceptions import ( FytaConnectionError, FytaPasswordError, ) +from fyta_cli.fyta_models import Credentials import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers.selector import ( TextSelector, TextSelectorConfig, @@ -49,14 +50,11 @@ DATA_SCHEMA = vol.Schema( class FytaConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for Fyta.""" + credentials: Credentials + _entry: FytaConfigEntry | None = None VERSION = 1 MINOR_VERSION = 2 - def __init__(self) -> None: - """Initialize FytaConfigFlow.""" - self.credentials: dict[str, Any] = {} - self._entry: FytaConfigEntry | None = None - async def async_auth(self, user_input: Mapping[str, Any]) -> dict[str, str]: """Reusable Auth Helper.""" fyta = FytaConnector(user_input[CONF_USERNAME], user_input[CONF_PASSWORD]) @@ -75,10 +73,6 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): finally: await fyta.client.close() - self.credentials[CONF_EXPIRATION] = self.credentials[ - CONF_EXPIRATION - ].isoformat() - return {} async def async_step_user( @@ -90,7 +84,10 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): self._async_abort_entries_match({CONF_USERNAME: user_input[CONF_USERNAME]}) if not (errors := await self.async_auth(user_input)): - user_input |= self.credentials + user_input |= { + CONF_ACCESS_TOKEN: self.credentials.access_token, + CONF_EXPIRATION: self.credentials.expiration.isoformat(), + } return self.async_create_entry( title=user_input[CONF_USERNAME], data=user_input ) @@ -114,7 +111,10 @@ class FytaConfigFlow(ConfigFlow, domain=DOMAIN): assert self._entry is not None if user_input and not (errors := await self.async_auth(user_input)): - user_input |= self.credentials + user_input |= { + CONF_ACCESS_TOKEN: self.credentials.access_token, + CONF_EXPIRATION: self.credentials.expiration.isoformat(), + } return self.async_update_reload_and_abort( self._entry, data={**self._entry.data, **user_input} ) diff --git a/homeassistant/components/fyta/coordinator.py b/homeassistant/components/fyta/coordinator.py index b6fbf73ec25..c92a96eed63 100644 --- a/homeassistant/components/fyta/coordinator.py +++ b/homeassistant/components/fyta/coordinator.py @@ -4,7 +4,7 @@ from __future__ import annotations from datetime import datetime, timedelta import logging -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING from fyta_cli.fyta_connector import FytaConnector from fyta_cli.fyta_exceptions import ( @@ -13,6 +13,7 @@ from fyta_cli.fyta_exceptions import ( FytaPasswordError, FytaPlantError, ) +from fyta_cli.fyta_models import Plant from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.core import HomeAssistant @@ -27,7 +28,7 @@ 
if TYPE_CHECKING: _LOGGER = logging.getLogger(__name__) -class FytaCoordinator(DataUpdateCoordinator[dict[int, dict[str, Any]]]): +class FytaCoordinator(DataUpdateCoordinator[dict[int, Plant]]): """Fyta custom coordinator.""" config_entry: FytaConfigEntry @@ -44,7 +45,7 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, dict[str, Any]]]): async def _async_update_data( self, - ) -> dict[int, dict[str, Any]]: + ) -> dict[int, Plant]: """Fetch data from API endpoint.""" if ( @@ -60,7 +61,6 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, dict[str, Any]]]): async def renew_authentication(self) -> bool: """Renew access token for FYTA API.""" - credentials: dict[str, Any] = {} try: credentials = await self.fyta.login() @@ -70,8 +70,8 @@ class FytaCoordinator(DataUpdateCoordinator[dict[int, dict[str, Any]]]): raise ConfigEntryAuthFailed from ex new_config_entry = {**self.config_entry.data} - new_config_entry[CONF_ACCESS_TOKEN] = credentials[CONF_ACCESS_TOKEN] - new_config_entry[CONF_EXPIRATION] = credentials[CONF_EXPIRATION].isoformat() + new_config_entry[CONF_ACCESS_TOKEN] = credentials.access_token + new_config_entry[CONF_EXPIRATION] = credentials.expiration.isoformat() self.hass.config_entries.async_update_entry( self.config_entry, data=new_config_entry diff --git a/homeassistant/components/fyta/diagnostics.py b/homeassistant/components/fyta/diagnostics.py index 55720b75ee6..d02f8cacfa3 100644 --- a/homeassistant/components/fyta/diagnostics.py +++ b/homeassistant/components/fyta/diagnostics.py @@ -25,5 +25,5 @@ async def async_get_config_entry_diagnostics( return { "config_entry": async_redact_data(config_entry.as_dict(), TO_REDACT), - "plant_data": data, + "plant_data": {key: value.to_dict() for key, value in data.items()}, } diff --git a/homeassistant/components/fyta/entity.py b/homeassistant/components/fyta/entity.py index 681a50f4cf5..18c52d74e25 100644 --- a/homeassistant/components/fyta/entity.py +++ b/homeassistant/components/fyta/entity.py @@ -1,6 +1,6 @@ """Entities for FYTA integration.""" -from typing import Any +from fyta_cli.fyta_models import Plant from homeassistant.components.sensor import SensorEntityDescription from homeassistant.config_entries import ConfigEntry @@ -32,13 +32,13 @@ class FytaPlantEntity(CoordinatorEntity[FytaCoordinator]): manufacturer="Fyta", model="Plant", identifiers={(DOMAIN, f"{entry.entry_id}-{plant_id}")}, - name=self.plant.get("name"), - sw_version=self.plant.get("sw_version"), + name=self.plant.name, + sw_version=self.plant.sw_version, ) self.entity_description = description @property - def plant(self) -> dict[str, Any]: + def plant(self) -> Plant: """Get plant data.""" return self.coordinator.data[self.plant_id] diff --git a/homeassistant/components/fyta/manifest.json b/homeassistant/components/fyta/manifest.json index f0953dd2a33..dbd44ed34dc 100644 --- a/homeassistant/components/fyta/manifest.json +++ b/homeassistant/components/fyta/manifest.json @@ -7,5 +7,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "quality_scale": "platinum", - "requirements": ["fyta_cli==0.4.1"] + "requirements": ["fyta_cli==0.6.6"] } diff --git a/homeassistant/components/fyta/sensor.py b/homeassistant/components/fyta/sensor.py index 27576ae5065..a351d79dd8b 100644 --- a/homeassistant/components/fyta/sensor.py +++ b/homeassistant/components/fyta/sensor.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from datetime import datetime from typing import Final -from fyta_cli.fyta_connector import PLANT_MEASUREMENT_STATUS, PLANT_STATUS +from 
fyta_cli.fyta_models import Plant from homeassistant.components.sensor import ( SensorDeviceClass, @@ -23,19 +23,18 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType from . import FytaConfigEntry from .coordinator import FytaCoordinator from .entity import FytaPlantEntity -@dataclass(frozen=True) +@dataclass(frozen=True, kw_only=True) class FytaSensorEntityDescription(SensorEntityDescription): """Describes Fyta sensor entity.""" - value_fn: Callable[[str | int | float | datetime], str | int | float | datetime] = ( - lambda value: value - ) + value_fn: Callable[[Plant], StateType | datetime] PLANT_STATUS_LIST: list[str] = ["deleted", "doing_great", "need_attention", "no_sensor"] @@ -48,63 +47,68 @@ PLANT_MEASUREMENT_STATUS_LIST: list[str] = [ "too_high", ] + SENSORS: Final[list[FytaSensorEntityDescription]] = [ FytaSensorEntityDescription( key="scientific_name", translation_key="scientific_name", + value_fn=lambda plant: plant.scientific_name, ), FytaSensorEntityDescription( key="status", translation_key="plant_status", device_class=SensorDeviceClass.ENUM, options=PLANT_STATUS_LIST, - value_fn=PLANT_STATUS.get, + value_fn=lambda plant: plant.status.name.lower(), ), FytaSensorEntityDescription( key="temperature_status", translation_key="temperature_status", device_class=SensorDeviceClass.ENUM, options=PLANT_MEASUREMENT_STATUS_LIST, - value_fn=PLANT_MEASUREMENT_STATUS.get, + value_fn=lambda plant: plant.temperature_status.name.lower(), ), FytaSensorEntityDescription( key="light_status", translation_key="light_status", device_class=SensorDeviceClass.ENUM, options=PLANT_MEASUREMENT_STATUS_LIST, - value_fn=PLANT_MEASUREMENT_STATUS.get, + value_fn=lambda plant: plant.light_status.name.lower(), ), FytaSensorEntityDescription( key="moisture_status", translation_key="moisture_status", device_class=SensorDeviceClass.ENUM, options=PLANT_MEASUREMENT_STATUS_LIST, - value_fn=PLANT_MEASUREMENT_STATUS.get, + value_fn=lambda plant: plant.moisture_status.name.lower(), ), FytaSensorEntityDescription( key="salinity_status", translation_key="salinity_status", device_class=SensorDeviceClass.ENUM, options=PLANT_MEASUREMENT_STATUS_LIST, - value_fn=PLANT_MEASUREMENT_STATUS.get, + value_fn=lambda plant: plant.salinity_status.name.lower(), ), FytaSensorEntityDescription( key="temperature", native_unit_of_measurement=UnitOfTemperature.CELSIUS, device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.temperature, ), FytaSensorEntityDescription( key="light", translation_key="light", native_unit_of_measurement="μmol/s⋅m²", state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.light, ), FytaSensorEntityDescription( key="moisture", native_unit_of_measurement=PERCENTAGE, device_class=SensorDeviceClass.MOISTURE, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.moisture, ), FytaSensorEntityDescription( key="salinity", @@ -112,11 +116,13 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [ native_unit_of_measurement=UnitOfConductivity.MILLISIEMENS, device_class=SensorDeviceClass.CONDUCTIVITY, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.salinity, ), FytaSensorEntityDescription( key="ph", device_class=SensorDeviceClass.PH, state_class=SensorStateClass.MEASUREMENT, + value_fn=lambda plant: plant.ph, ), FytaSensorEntityDescription( 
key="battery_level", @@ -124,6 +130,7 @@ SENSORS: Final[list[FytaSensorEntityDescription]] = [ device_class=SensorDeviceClass.BATTERY, state_class=SensorStateClass.MEASUREMENT, entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda plant: plant.battery_level, ), ] @@ -138,7 +145,7 @@ async def async_setup_entry( FytaPlantSensor(coordinator, entry, sensor, plant_id) for plant_id in coordinator.fyta.plant_list for sensor in SENSORS - if sensor.key in coordinator.data[plant_id] + if sensor.key in dir(coordinator.data.get(plant_id)) ] async_add_entities(plant_entities) @@ -150,8 +157,7 @@ class FytaPlantSensor(FytaPlantEntity, SensorEntity): entity_description: FytaSensorEntityDescription @property - def native_value(self) -> str | int | float | datetime: + def native_value(self) -> StateType | datetime: """Return the state for this sensor.""" - val = self.plant[self.entity_description.key] - return self.entity_description.value_fn(val) + return self.entity_description.value_fn(self.plant) diff --git a/homeassistant/components/gardena_bluetooth/manifest.json b/homeassistant/components/gardena_bluetooth/manifest.json index 4812def7dde..6d7566b3edf 100644 --- a/homeassistant/components/gardena_bluetooth/manifest.json +++ b/homeassistant/components/gardena_bluetooth/manifest.json @@ -14,5 +14,5 @@ "documentation": "https://www.home-assistant.io/integrations/gardena_bluetooth", "iot_class": "local_polling", "loggers": ["bleak", "bleak_esphome", "gardena_bluetooth"], - "requirements": ["gardena-bluetooth==1.4.2"] + "requirements": ["gardena-bluetooth==1.4.3"] } diff --git a/homeassistant/components/generic/camera.py b/homeassistant/components/generic/camera.py index 80971760b85..3aac5145ca5 100644 --- a/homeassistant/components/generic/camera.py +++ b/homeassistant/components/generic/camera.py @@ -28,10 +28,10 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import TemplateError -from homeassistant.helpers import config_validation as cv, template as template_helper from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.httpx_client import get_async_client +from homeassistant.helpers.template import Template from . 
import DOMAIN from .const import ( @@ -91,18 +91,11 @@ class GenericCamera(Camera): self._password = device_info.get(CONF_PASSWORD) self._name = device_info.get(CONF_NAME, title) self._still_image_url = device_info.get(CONF_STILL_IMAGE_URL) - if ( - not isinstance(self._still_image_url, template_helper.Template) - and self._still_image_url - ): - self._still_image_url = cv.template(self._still_image_url) if self._still_image_url: - self._still_image_url.hass = hass + self._still_image_url = Template(self._still_image_url, hass) self._stream_source = device_info.get(CONF_STREAM_SOURCE) if self._stream_source: - if not isinstance(self._stream_source, template_helper.Template): - self._stream_source = cv.template(self._stream_source) - self._stream_source.hass = hass + self._stream_source = Template(self._stream_source, hass) self._limit_refetch = device_info[CONF_LIMIT_REFETCH_TO_URL_CHANGE] self._attr_frame_interval = 1 / device_info[CONF_FRAMERATE] if self._stream_source: diff --git a/homeassistant/components/generic/icons.json b/homeassistant/components/generic/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/generic/icons.json +++ b/homeassistant/components/generic/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/generic_thermostat/climate.py b/homeassistant/components/generic_thermostat/climate.py index c142d15f9e5..2a118b70879 100644 --- a/homeassistant/components/generic_thermostat/climate.py +++ b/homeassistant/components/generic_thermostat/climate.py @@ -485,7 +485,7 @@ class GenericThermostat(ClimateEntity, RestoreEntity): try: cur_temp = float(state.state) if not math.isfinite(cur_temp): - raise ValueError(f"Sensor has illegal state {state.state}") + raise ValueError(f"Sensor has illegal state {state.state}") # noqa: TRY301 self._cur_temp = cur_temp except ValueError as ex: _LOGGER.error("Unable to update from sensor: %s", ex) diff --git a/homeassistant/components/generic_thermostat/icons.json b/homeassistant/components/generic_thermostat/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/generic_thermostat/icons.json +++ b/homeassistant/components/generic_thermostat/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/geniushub/config_flow.py b/homeassistant/components/geniushub/config_flow.py index 5f026c91ee1..601eac6c2f2 100644 --- a/homeassistant/components/geniushub/config_flow.py +++ b/homeassistant/components/geniushub/config_flow.py @@ -124,12 +124,12 @@ class GeniusHubConfigFlow(ConfigFlow, domain=DOMAIN): step_id="cloud_api", errors=errors, data_schema=CLOUD_API_SCHEMA ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import the yaml config.""" - if CONF_HOST in user_input: - result = await self.async_step_local_api(user_input) + if CONF_HOST in import_data: + result = await self.async_step_local_api(import_data) else: - result = await self.async_step_cloud_api(user_input) + result = await self.async_step_cloud_api(import_data) if result["type"] is FlowResultType.FORM: assert result["errors"] return self.async_abort(reason=result["errors"]["base"]) diff --git a/homeassistant/components/geniushub/icons.json b/homeassistant/components/geniushub/icons.json index 41697b419a8..c8a59dedbbd 
100644 --- a/homeassistant/components/geniushub/icons.json +++ b/homeassistant/components/geniushub/icons.json @@ -1,7 +1,13 @@ { "services": { - "set_zone_mode": "mdi:auto-mode", - "set_zone_override": "mdi:thermometer-lines", - "set_switch_override": "mdi:toggle-switch-variant" + "set_zone_mode": { + "service": "mdi:auto-mode" + }, + "set_zone_override": { + "service": "mdi:thermometer-lines" + }, + "set_switch_override": { + "service": "mdi:toggle-switch-variant" + } } } diff --git a/homeassistant/components/geonetnz_quakes/config_flow.py b/homeassistant/components/geonetnz_quakes/config_flow.py index 4367f820bd3..083ac29b362 100644 --- a/homeassistant/components/geonetnz_quakes/config_flow.py +++ b/homeassistant/components/geonetnz_quakes/config_flow.py @@ -1,10 +1,11 @@ """Config flow to configure the GeoNet NZ Quakes integration.""" import logging +from typing import Any import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -44,11 +45,13 @@ class GeonetnzQuakesFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors or {} ) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" _LOGGER.debug("User input: %s", user_input) if not user_input: diff --git a/homeassistant/components/geonetnz_volcano/config_flow.py b/homeassistant/components/geonetnz_volcano/config_flow.py index 461da61ae1a..45a074d215c 100644 --- a/homeassistant/components/geonetnz_volcano/config_flow.py +++ b/homeassistant/components/geonetnz_volcano/config_flow.py @@ -1,8 +1,10 @@ """Config flow to configure the GeoNet NZ Volcano integration.""" +from typing import Any + import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -45,11 +47,13 @@ class GeonetnzVolcanoFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=data_schema, errors=errors or {} ) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" if not user_input: return await self._show_form() diff --git a/homeassistant/components/glances/__init__.py b/homeassistant/components/glances/__init__.py index f83b39d1cf9..0ddd8a86979 100644 --- a/homeassistant/components/glances/__init__.py +++ b/homeassistant/components/glances/__init__.py @@ -27,7 +27,6 @@ from homeassistant.exceptions import ( ConfigEntryNotReady, HomeAssistantError, ) -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.httpx_client import 
get_async_client from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue @@ -36,7 +35,6 @@ from .coordinator import GlancesDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/goodwe/config_flow.py b/homeassistant/components/goodwe/config_flow.py index d6a3be7e56a..354877e782f 100644 --- a/homeassistant/components/goodwe/config_flow.py +++ b/homeassistant/components/goodwe/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from goodwe import InverterError, connect import voluptuous as vol @@ -26,7 +27,9 @@ class GoodweFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult: + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/goodwe/sensor.py b/homeassistant/components/goodwe/sensor.py index 795b26a0c9f..03912c9a1ec 100644 --- a/homeassistant/components/goodwe/sensor.py +++ b/homeassistant/components/goodwe/sensor.py @@ -20,7 +20,6 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, EntityCategory, UnitOfApparentPower, UnitOfElectricCurrent, @@ -28,6 +27,7 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, UnitOfTemperature, UnitOfTime, ) @@ -126,7 +126,7 @@ _DESCRIPTIONS: dict[str, GoodweSensorEntityDescription] = { key="var", device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, entity_registry_enabled_default=False, ), "C": GoodweSensorEntityDescription( diff --git a/homeassistant/components/google/config_flow.py b/homeassistant/components/google/config_flow.py index 6207303c8a6..98424ef24f5 100644 --- a/homeassistant/components/google/config_flow.py +++ b/homeassistant/components/google/config_flow.py @@ -94,18 +94,6 @@ class OAuth2FlowHandler( "prompt": "consent", } - async def async_step_import(self, info: dict[str, Any]) -> ConfigFlowResult: - """Import existing auth into a new config entry.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - implementations = await config_entry_oauth2_flow.async_get_implementations( - self.hass, self.DOMAIN - ) - assert len(implementations) == 1 - self.flow_impl = list(implementations.values())[0] - self.external_data = info - return await super().async_step_creation(info) - async def async_step_auth( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/google/icons.json b/homeassistant/components/google/icons.json index 6dbad61b43d..e4f25442546 100644 --- a/homeassistant/components/google/icons.json +++ b/homeassistant/components/google/icons.json @@ -1,6 +1,10 @@ { "services": { - "add_event": "mdi:calendar-plus", - "create_event": "mdi:calendar-plus" + "add_event": { + "service": "mdi:calendar-plus" + }, + "create_event": { + "service": "mdi:calendar-plus" + } } } diff --git a/homeassistant/components/google_assistant/config_flow.py 
b/homeassistant/components/google_assistant/config_flow.py index 9504c623138..5934657f9ae 100644 --- a/homeassistant/components/google_assistant/config_flow.py +++ b/homeassistant/components/google_assistant/config_flow.py @@ -1,6 +1,8 @@ """Config flow for google assistant component.""" -from homeassistant.config_entries import ConfigFlow +from typing import Any + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import CONF_PROJECT_ID, DOMAIN @@ -10,10 +12,10 @@ class GoogleAssistantHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" - await self.async_set_unique_id(unique_id=user_input[CONF_PROJECT_ID]) + await self.async_set_unique_id(unique_id=import_data[CONF_PROJECT_ID]) self._abort_if_unique_id_configured() return self.async_create_entry( - title=user_input[CONF_PROJECT_ID], data=user_input + title=import_data[CONF_PROJECT_ID], data=import_data ) diff --git a/homeassistant/components/google_assistant/icons.json b/homeassistant/components/google_assistant/icons.json index 3bcab03d2c2..a522103328a 100644 --- a/homeassistant/components/google_assistant/icons.json +++ b/homeassistant/components/google_assistant/icons.json @@ -1,5 +1,7 @@ { "services": { - "request_sync": "mdi:sync" + "request_sync": { + "service": "mdi:sync" + } } } diff --git a/homeassistant/components/google_assistant/trait.py b/homeassistant/components/google_assistant/trait.py index e54684fbc64..145eb4b2935 100644 --- a/homeassistant/components/google_assistant/trait.py +++ b/homeassistant/components/google_assistant/trait.py @@ -294,7 +294,7 @@ class _Trait(ABC): self.state = state self.config = config - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" raise NotImplementedError @@ -302,7 +302,7 @@ class _Trait(ABC): """Add options for the sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" raise NotImplementedError @@ -337,11 +337,11 @@ class BrightnessTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return brightness attributes for a sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return brightness query attributes.""" domain = self.state.domain response = {} @@ -388,7 +388,7 @@ class CameraStreamTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return stream attributes for a sync request.""" return { "cameraStreamSupportedProtocols": ["hls"], @@ -396,7 +396,7 @@ class CameraStreamTrait(_Trait): "cameraStreamNeedDrmEncryption": False, } - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return camera stream attributes.""" return self.stream_info or {} @@ -426,7 +426,7 @@ class ObjectDetection(_Trait): domain == event.DOMAIN and device_class == event.EventDeviceClass.DOORBELL ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return ObjectDetection attributes for a sync request.""" return {} @@ -434,7 +434,7 @@ class ObjectDetection(_Trait): """Add options for the sync request.""" return {"notificationSupportedByAgent": True} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return 
ObjectDetection query attributes.""" return {} @@ -498,13 +498,13 @@ class OnOffTrait(_Trait): humidifier.DOMAIN, ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return OnOff attributes for a sync request.""" if self.state.attributes.get(ATTR_ASSUMED_STATE, False): return {"commandOnlyOnOff": True} return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return OnOff query attributes.""" return {"on": self.state.state not in (STATE_OFF, STATE_UNKNOWN)} @@ -548,11 +548,11 @@ class ColorSettingTrait(_Trait): color_modes ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return color temperature attributes for a sync request.""" attrs = self.state.attributes color_modes = attrs.get(light.ATTR_SUPPORTED_COLOR_MODES) - response = {} + response: dict[str, Any] = {} if light.color_supported(color_modes): response["colorModel"] = "hsv" @@ -571,11 +571,11 @@ class ColorSettingTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return color temperature query attributes.""" color_mode = self.state.attributes.get(light.ATTR_COLOR_MODE) - color = {} + color: dict[str, Any] = {} if light.color_supported([color_mode]): color_hs = self.state.attributes.get(light.ATTR_HS_COLOR) @@ -684,12 +684,12 @@ class SceneTrait(_Trait): script.DOMAIN, ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return scene attributes for a sync request.""" # None of the supported domains can support sceneReversible return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return scene query attributes.""" return {} @@ -728,11 +728,11 @@ class DockTrait(_Trait): """Test if state is supported.""" return domain == vacuum.DOMAIN - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return dock attributes for a sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return dock query attributes.""" return {"isDocked": self.state.state == vacuum.STATE_DOCKED} @@ -762,11 +762,11 @@ class LocatorTrait(_Trait): """Test if state is supported.""" return domain == vacuum.DOMAIN and features & VacuumEntityFeature.LOCATE - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return locator attributes for a sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return locator query attributes.""" return {} @@ -802,14 +802,14 @@ class EnergyStorageTrait(_Trait): """Test if state is supported.""" return domain == vacuum.DOMAIN and features & VacuumEntityFeature.BATTERY - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return EnergyStorage attributes for a sync request.""" return { "isRechargeable": True, "queryOnlyEnergyStorage": True, } - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return EnergyStorage query attributes.""" battery_level = self.state.attributes.get(ATTR_BATTERY_LEVEL) if battery_level is None: @@ -866,7 +866,7 @@ class StartStopTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return StartStop attributes for a sync request.""" domain = self.state.domain if domain == vacuum.DOMAIN: @@ -878,7 +878,9 @@ class StartStopTrait(_Trait): if domain in COVER_VALVE_DOMAINS: return {} - def query_attributes(self): + raise 
NotImplementedError(f"Unsupported domain {domain}") + + def query_attributes(self) -> dict[str, Any]: """Return StartStop query attributes.""" domain = self.state.domain state = self.state.state @@ -898,13 +900,17 @@ class StartStopTrait(_Trait): ) } + raise NotImplementedError(f"Unsupported domain {domain}") + async def execute(self, command, data, params, challenge): """Execute a StartStop command.""" domain = self.state.domain if domain == vacuum.DOMAIN: - return await self._execute_vacuum(command, data, params, challenge) + await self._execute_vacuum(command, data, params, challenge) + return if domain in COVER_VALVE_DOMAINS: - return await self._execute_cover_or_valve(command, data, params, challenge) + await self._execute_cover_or_valve(command, data, params, challenge) + return async def _execute_vacuum(self, command, data, params, challenge): """Execute a StartStop command.""" @@ -1006,7 +1012,7 @@ class TemperatureControlTrait(_Trait): and device_class == sensor.SensorDeviceClass.TEMPERATURE ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return temperature attributes for a sync request.""" response = {} domain = self.state.domain @@ -1042,7 +1048,7 @@ class TemperatureControlTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return temperature states.""" response = {} domain = self.state.domain @@ -1168,7 +1174,7 @@ class TemperatureSettingTrait(_Trait): return modes - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return temperature point and modes attributes for a sync request.""" response = {} attrs = self.state.attributes @@ -1211,9 +1217,9 @@ class TemperatureSettingTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return temperature point and modes query attributes.""" - response = {} + response: dict[str, Any] = {} attrs = self.state.attributes unit = self.hass.config.units.temperature_unit @@ -1426,9 +1432,9 @@ class HumiditySettingTrait(_Trait): and device_class == sensor.SensorDeviceClass.HUMIDITY ) - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return humidity attributes for a sync request.""" - response = {} + response: dict[str, Any] = {} attrs = self.state.attributes domain = self.state.domain @@ -1449,7 +1455,7 @@ class HumiditySettingTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return humidity query attributes.""" response = {} attrs = self.state.attributes @@ -1458,9 +1464,9 @@ class HumiditySettingTrait(_Trait): if domain == sensor.DOMAIN: device_class = attrs.get(ATTR_DEVICE_CLASS) if device_class == sensor.SensorDeviceClass.HUMIDITY: - current_humidity = self.state.state - if current_humidity not in (STATE_UNKNOWN, STATE_UNAVAILABLE): - response["humidityAmbientPercent"] = round(float(current_humidity)) + humidity_state = self.state.state + if humidity_state not in (STATE_UNKNOWN, STATE_UNAVAILABLE): + response["humidityAmbientPercent"] = round(float(humidity_state)) elif domain == humidifier.DOMAIN: target_humidity: int | None = attrs.get(humidifier.ATTR_HUMIDITY) @@ -1512,11 +1518,11 @@ class LockUnlockTrait(_Trait): """Return if the trait might ask for 2FA.""" return True - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return LockUnlock attributes for a sync request.""" return {} - def query_attributes(self): + def query_attributes(self) -> 
dict[str, Any]: """Return LockUnlock query attributes.""" if self.state.state == STATE_JAMMED: return {"isJammed": True} @@ -1598,7 +1604,7 @@ class ArmDisArmTrait(_Trait): return states[0] - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return ArmDisarm attributes for a sync request.""" response = {} levels = [] @@ -1618,7 +1624,7 @@ class ArmDisArmTrait(_Trait): response["availableArmLevels"] = {"levels": levels, "ordered": True} return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return ArmDisarm query attributes.""" armed_state = self.state.attributes.get("next_state", self.state.state) @@ -1715,11 +1721,11 @@ class FanSpeedTrait(_Trait): return features & ClimateEntityFeature.FAN_MODE return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return speed point and modes attributes for a sync request.""" domain = self.state.domain speeds = [] - result = {} + result: dict[str, Any] = {} if domain == fan.DOMAIN: reversible = bool( @@ -1764,7 +1770,7 @@ class FanSpeedTrait(_Trait): return result - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return speed point and modes query attributes.""" attrs = self.state.attributes @@ -1910,7 +1916,7 @@ class ModesTrait(_Trait): ) return mode - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return mode attributes for a sync request.""" modes = [] @@ -1934,10 +1940,10 @@ class ModesTrait(_Trait): return {"availableModes": modes} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return current modes.""" attrs = self.state.attributes - response = {} + response: dict[str, Any] = {} mode_settings = {} if self.state.domain == fan.DOMAIN: @@ -2098,7 +2104,7 @@ class InputSelectorTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return mode attributes for a sync request.""" attrs = self.state.attributes sourcelist: list[str] = attrs.get(media_player.ATTR_INPUT_SOURCE_LIST) or [] @@ -2109,7 +2115,7 @@ class InputSelectorTrait(_Trait): return {"availableInputs": inputs, "orderedInputs": True} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return current modes.""" attrs = self.state.attributes return {"currentInput": attrs.get(media_player.ATTR_INPUT_SOURCE, "")} @@ -2179,7 +2185,7 @@ class OpenCloseTrait(_Trait): """Return if the trait might ask for 2FA.""" return domain == cover.DOMAIN and device_class in OpenCloseTrait.COVER_2FA - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return opening direction.""" response = {} features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) @@ -2215,10 +2221,10 @@ class OpenCloseTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return state query attributes.""" domain = self.state.domain - response = {} + response: dict[str, Any] = {} # When it's an assumed state, we will return empty state # This shouldn't happen because we set `commandOnlyOpenClose` @@ -2324,7 +2330,7 @@ class VolumeTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return volume attributes for a sync request.""" features = self.state.attributes.get(ATTR_SUPPORTED_FEATURES, 0) return { @@ -2341,7 +2347,7 @@ class VolumeTrait(_Trait): "levelStepSize": 10, } - def query_attributes(self): + def 
query_attributes(self) -> dict[str, Any]: """Return volume query attributes.""" response = {} @@ -2504,7 +2510,7 @@ class TransportControlTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return opening direction.""" response = {} @@ -2519,7 +2525,7 @@ class TransportControlTrait(_Trait): return response - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" return {} @@ -2618,11 +2624,11 @@ class MediaStateTrait(_Trait): """Test if state is supported.""" return domain == media_player.DOMAIN - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" return {"supportActivityState": True, "supportPlaybackState": True} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" return { "activityState": self.activity_lookup.get(self.state.state, "INACTIVE"), @@ -2652,11 +2658,11 @@ class ChannelTrait(_Trait): return False - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" return {"availableChannels": [], "commandOnlyChannels": True} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return channel query attributes.""" return {} @@ -2729,7 +2735,7 @@ class SensorStateTrait(_Trait): """Test if state is supported.""" return domain == sensor.DOMAIN and device_class in cls.sensor_types - def sync_attributes(self): + def sync_attributes(self) -> dict[str, Any]: """Return attributes for a sync request.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) data = self.sensor_types.get(device_class) @@ -2757,7 +2763,7 @@ class SensorStateTrait(_Trait): return {"sensorStatesSupported": [sensor_state]} - def query_attributes(self): + def query_attributes(self) -> dict[str, Any]: """Return the attributes of this trait for this entity.""" device_class = self.state.attributes.get(ATTR_DEVICE_CLASS) data = self.sensor_types.get(device_class) diff --git a/homeassistant/components/google_assistant_sdk/icons.json b/homeassistant/components/google_assistant_sdk/icons.json index bf1420b2e3f..75747c43f5b 100644 --- a/homeassistant/components/google_assistant_sdk/icons.json +++ b/homeassistant/components/google_assistant_sdk/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_text_command": "mdi:comment-text-outline" + "send_text_command": { + "service": "mdi:comment-text-outline" + } } } diff --git a/homeassistant/components/google_generative_ai_conversation/icons.json b/homeassistant/components/google_generative_ai_conversation/icons.json index 6544532783a..6ac3cc3b21c 100644 --- a/homeassistant/components/google_generative_ai_conversation/icons.json +++ b/homeassistant/components/google_generative_ai_conversation/icons.json @@ -1,5 +1,7 @@ { "services": { - "generate_content": "mdi:receipt-text" + "generate_content": { + "service": "mdi:receipt-text" + } } } diff --git a/homeassistant/components/google_mail/icons.json b/homeassistant/components/google_mail/icons.json index 599ccffe3c7..d0a6eb33715 100644 --- a/homeassistant/components/google_mail/icons.json +++ b/homeassistant/components/google_mail/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_vacation": "mdi:beach" + "set_vacation": { + "service": "mdi:beach" + } } } diff --git a/homeassistant/components/google_sheets/icons.json 
b/homeassistant/components/google_sheets/icons.json index c8010a690be..e2b6ed57579 100644 --- a/homeassistant/components/google_sheets/icons.json +++ b/homeassistant/components/google_sheets/icons.json @@ -1,5 +1,7 @@ { "services": { - "append_sheet": "mdi:google-spreadsheet" + "append_sheet": { + "service": "mdi:google-spreadsheet" + } } } diff --git a/homeassistant/components/google_translate/tts.py b/homeassistant/components/google_translate/tts.py index 221c99e7c20..13e0ca4c273 100644 --- a/homeassistant/components/google_translate/tts.py +++ b/homeassistant/components/google_translate/tts.py @@ -74,7 +74,7 @@ class GoogleTTSEntity(TextToSpeechEntity): else: self._lang = lang self._tld = tld - self._attr_name = f"Google {self._lang} {self._tld}" + self._attr_name = f"Google Translate {self._lang} {self._tld}" self._attr_unique_id = config_entry.entry_id @property @@ -130,7 +130,7 @@ class GoogleProvider(Provider): else: self._lang = lang self._tld = tld - self.name = "Google" + self.name = "Google Translate" @property def default_language(self) -> str: diff --git a/homeassistant/components/gpsd/config_flow.py b/homeassistant/components/gpsd/config_flow.py index 59c95d0ddbf..ac41324f857 100644 --- a/homeassistant/components/gpsd/config_flow.py +++ b/homeassistant/components/gpsd/config_flow.py @@ -39,10 +39,6 @@ class GPSDConfigFlow(ConfigFlow, domain=DOMAIN): else: return True - async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_data) - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/gpsd/icons.json b/homeassistant/components/gpsd/icons.json index b29640e0001..59d904f918c 100644 --- a/homeassistant/components/gpsd/icons.json +++ b/homeassistant/components/gpsd/icons.json @@ -7,6 +7,15 @@ "2d_fix": "mdi:crosshairs-gps", "3d_fix": "mdi:crosshairs-gps" } + }, + "latitude": { + "default": "mdi:latitude" + }, + "longitude": { + "default": "mdi:longitude" + }, + "elevation": { + "default": "mdi:arrow-up-down" } } } diff --git a/homeassistant/components/gpsd/sensor.py b/homeassistant/components/gpsd/sensor.py index e67287ae134..1bac41ecaae 100644 --- a/homeassistant/components/gpsd/sensor.py +++ b/homeassistant/components/gpsd/sensor.py @@ -4,38 +4,31 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from datetime import datetime import logging from typing import Any -from gps3.agps3threaded import ( - GPSD_PORT as DEFAULT_PORT, - HOST as DEFAULT_HOST, - AGPS3mechanism, -) -import voluptuous as vol +from gps3.agps3threaded import AGPS3mechanism from homeassistant.components.sensor import ( - PLATFORM_SCHEMA as SENSOR_PLATFORM_SCHEMA, SensorDeviceClass, SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import ( ATTR_LATITUDE, ATTR_LONGITUDE, ATTR_MODE, - CONF_HOST, - CONF_NAME, - CONF_PORT, + ATTR_TIME, EntityCategory, + UnitOfLength, + UnitOfSpeed, ) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -import homeassistant.helpers.config_validation as cv +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, 
async_create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.helpers.typing import StateType +from homeassistant.util import dt as dt_util from . import GPSDConfigEntry from .const import DOMAIN @@ -56,27 +49,73 @@ _MODE_VALUES = {2: "2d_fix", 3: "3d_fix"} class GpsdSensorDescription(SensorEntityDescription): """Class describing GPSD sensor entities.""" - value_fn: Callable[[AGPS3mechanism], str | None] + value_fn: Callable[[AGPS3mechanism], StateType | datetime] SENSOR_TYPES: tuple[GpsdSensorDescription, ...] = ( GpsdSensorDescription( - key="mode", - translation_key="mode", + key=ATTR_MODE, + translation_key=ATTR_MODE, name=None, entity_category=EntityCategory.DIAGNOSTIC, device_class=SensorDeviceClass.ENUM, options=list(_MODE_VALUES.values()), value_fn=lambda agps_thread: _MODE_VALUES.get(agps_thread.data_stream.mode), ), -) - -PLATFORM_SCHEMA = SENSOR_PLATFORM_SCHEMA.extend( - { - vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, - vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string, - vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port, - } + GpsdSensorDescription( + key=ATTR_LATITUDE, + translation_key=ATTR_LATITUDE, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda agps_thread: agps_thread.data_stream.lat, + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_LONGITUDE, + translation_key=ATTR_LONGITUDE, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda agps_thread: agps_thread.data_stream.lon, + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_ELEVATION, + translation_key=ATTR_ELEVATION, + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.DISTANCE, + native_unit_of_measurement=UnitOfLength.METERS, + value_fn=lambda agps_thread: agps_thread.data_stream.alt, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_TIME, + translation_key=ATTR_TIME, + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.TIMESTAMP, + value_fn=lambda agps_thread: dt_util.parse_datetime( + agps_thread.data_stream.time + ), + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_SPEED, + translation_key=ATTR_SPEED, + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.SPEED, + native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, + value_fn=lambda agps_thread: agps_thread.data_stream.speed, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), + GpsdSensorDescription( + key=ATTR_CLIMB, + translation_key=ATTR_CLIMB, + entity_category=EntityCategory.DIAGNOSTIC, + device_class=SensorDeviceClass.SPEED, + native_unit_of_measurement=UnitOfSpeed.METERS_PER_SECOND, + value_fn=lambda agps_thread: agps_thread.data_stream.climb, + suggested_display_precision=2, + entity_registry_enabled_default=False, + ), ) @@ -98,34 +137,6 @@ async def async_setup_entry( ) -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Initialize gpsd import from config.""" - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - is_fixable=False, - breaks_in_ha_version="2024.9.0", - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "GPSD", - }, - ) - - 
hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_IMPORT}, data=config - ) - ) - - class GpsdSensor(SensorEntity): """Representation of a GPS receiver available via GPSD.""" @@ -150,13 +161,19 @@ class GpsdSensor(SensorEntity): self.agps_thread = agps_thread @property - def native_value(self) -> str | None: + def native_value(self) -> StateType | datetime: """Return the state of GPSD.""" - return self.entity_description.value_fn(self.agps_thread) + value = self.entity_description.value_fn(self.agps_thread) + return None if value == "n/a" else value + # Deprecated since Home Assistant 2024.9.0 + # Can be removed completely in 2025.3.0 @property - def extra_state_attributes(self) -> dict[str, Any]: + def extra_state_attributes(self) -> dict[str, Any] | None: """Return the state attributes of the GPS.""" + if self.entity_description.key != ATTR_MODE: + return None + return { ATTR_LATITUDE: self.agps_thread.data_stream.lat, ATTR_LONGITUDE: self.agps_thread.data_stream.lon, diff --git a/homeassistant/components/gpsd/strings.json b/homeassistant/components/gpsd/strings.json index 20dc283a8bb..867edf0b5a8 100644 --- a/homeassistant/components/gpsd/strings.json +++ b/homeassistant/components/gpsd/strings.json @@ -18,7 +18,15 @@ }, "entity": { "sensor": { + "latitude": { "name": "[%key:common::config_flow::data::latitude%]" }, + "longitude": { "name": "[%key:common::config_flow::data::longitude%]" }, + "elevation": { "name": "[%key:common::config_flow::data::elevation%]" }, + "time": { + "name": "[%key:component::time_date::selector::display_options::options::time%]" + }, + "climb": { "name": "Climb" }, "mode": { + "name": "[%key:common::config_flow::data::mode%]", "state": { "2d_fix": "2D Fix", "3d_fix": "3D Fix" @@ -28,11 +36,19 @@ "longitude": { "name": "[%key:common::config_flow::data::longitude%]" }, - "elevation": { "name": "Elevation" }, - "gps_time": { "name": "Time" }, - "speed": { "name": "Speed" }, - "climb": { "name": "Climb" }, - "mode": { "name": "Mode" } + "elevation": { + "name": "[%key:common::config_flow::data::elevation%]" + }, + "gps_time": { + "name": "[%key:component::time_date::selector::display_options::options::time%]" + }, + "speed": { + "name": "[%key:component::sensor::entity_component::speed::name%]" + }, + "climb": { + "name": "[%key:component::gpsd::entity::sensor::climb::name%]" + }, + "mode": { "name": "[%key:common::config_flow::data::mode%]" } } } } diff --git a/homeassistant/components/group/icons.json b/homeassistant/components/group/icons.json index 8cca94e08e1..577d1effac0 100644 --- a/homeassistant/components/group/icons.json +++ b/homeassistant/components/group/icons.json @@ -1,7 +1,13 @@ { "services": { - "reload": "mdi:reload", - "set": "mdi:home-group-plus", - "remove": "mdi:home-group-remove" + "reload": { + "service": "mdi:reload" + }, + "set": { + "service": "mdi:home-group-plus" + }, + "remove": { + "service": "mdi:home-group-remove" + } } } diff --git a/homeassistant/components/group/sensor.py b/homeassistant/components/group/sensor.py index eaaedcf0e46..a99ed9dad63 100644 --- a/homeassistant/components/group/sensor.py +++ b/homeassistant/components/group/sensor.py @@ -406,7 +406,7 @@ class SensorGroup(GroupEntity, SensorEntity): and (uom := state.attributes["unit_of_measurement"]) not in self._valid_units ): - raise HomeAssistantError("Not a valid unit") + raise HomeAssistantError("Not a valid unit") # noqa: TRY301 sensor_values.append((entity_id, numeric_state, state)) if entity_id in 
self._state_incorrect: diff --git a/homeassistant/components/growatt_server/config_flow.py b/homeassistant/components/growatt_server/config_flow.py index 95002a70a95..8123d7ff067 100644 --- a/homeassistant/components/growatt_server/config_flow.py +++ b/homeassistant/components/growatt_server/config_flow.py @@ -1,9 +1,11 @@ """Config flow for growatt server integration.""" +from typing import Any + import growattServer import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME, CONF_PASSWORD, CONF_URL, CONF_USERNAME from homeassistant.core import callback @@ -21,11 +23,11 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialise growatt server flow.""" - self.api = None + self.api: growattServer.GrowattApi | None = None self.user_id = None - self.data = {} + self.data: dict[str, Any] = {} @callback def _async_show_user_form(self, errors=None): @@ -42,7 +44,9 @@ class GrowattServerConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=data_schema, errors=errors ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow.""" if not user_input: return self._async_show_user_form() diff --git a/homeassistant/components/guardian/icons.json b/homeassistant/components/guardian/icons.json index 4740366e993..fe44eb0460b 100644 --- a/homeassistant/components/guardian/icons.json +++ b/homeassistant/components/guardian/icons.json @@ -18,8 +18,14 @@ } }, "services": { - "pair_sensor": "mdi:link-variant", - "unpair_sensor": "mdi:link-variant-remove", - "upgrade_firmware": "mdi:update" + "pair_sensor": { + "service": "mdi:link-variant" + }, + "unpair_sensor": { + "service": "mdi:link-variant-remove" + }, + "upgrade_firmware": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/habitica/button.py b/homeassistant/components/habitica/button.py index cdd166a4444..276aa4e7fc0 100644 --- a/homeassistant/components/habitica/button.py +++ b/homeassistant/components/habitica/button.py @@ -113,7 +113,7 @@ class HabiticaButton(HabiticaBase, ButtonEntity): translation_key="service_call_exception", ) from e else: - await self.coordinator.async_refresh() + await self.coordinator.async_request_refresh() @property def available(self) -> bool: diff --git a/homeassistant/components/habitica/config_flow.py b/homeassistant/components/habitica/config_flow.py index 5dd9fb2aa22..a40261c0902 100644 --- a/homeassistant/components/habitica/config_flow.py +++ b/homeassistant/components/habitica/config_flow.py @@ -3,12 +3,13 @@ from __future__ import annotations import logging +from typing import Any from aiohttp import ClientResponseError from habitipy.aio import HabitipyAsync import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_NAME, CONF_URL from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -56,7 +57,9 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: 
"""Handle the initial step.""" errors = {} @@ -79,7 +82,7 @@ class HabiticaConfigFlow(ConfigFlow, domain=DOMAIN): description_placeholders={}, ) - async def async_step_import(self, import_data): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import habitica config from configuration.yaml.""" async_create_issue( diff --git a/homeassistant/components/habitica/coordinator.py b/homeassistant/components/habitica/coordinator.py index 1b17eee6352..357643593e4 100644 --- a/homeassistant/components/habitica/coordinator.py +++ b/homeassistant/components/habitica/coordinator.py @@ -15,6 +15,7 @@ from habitipy.aio import HabitipyAsync from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.debounce import Debouncer from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN @@ -41,7 +42,13 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): hass, _LOGGER, name=DOMAIN, - update_interval=timedelta(seconds=30), + update_interval=timedelta(seconds=60), + request_refresh_debouncer=Debouncer( + hass, + _LOGGER, + cooldown=5, + immediate=False, + ), ) self.api = habitipy @@ -58,6 +65,9 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): ) except ClientResponseError as error: + if error.status == HTTPStatus.TOO_MANY_REQUESTS: + _LOGGER.debug("Currently rate limited, skipping update") + return self.data raise UpdateFailed(f"Error communicating with API: {error}") from error return HabiticaData(user=user_response, tasks=tasks_response) @@ -80,4 +90,4 @@ class HabiticaDataUpdateCoordinator(DataUpdateCoordinator[HabiticaData]): translation_key="service_call_exception", ) from e else: - await self.async_refresh() + await self.async_request_refresh() diff --git a/homeassistant/components/habitica/icons.json b/homeassistant/components/habitica/icons.json index 710b8c9d25b..662cf1d84a5 100644 --- a/homeassistant/components/habitica/icons.json +++ b/homeassistant/components/habitica/icons.json @@ -88,6 +88,8 @@ } }, "services": { - "api_call": "mdi:console" + "api_call": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/habitica/sensor.py b/homeassistant/components/habitica/sensor.py index 6d1a11ed9c3..8762345b597 100644 --- a/homeassistant/components/habitica/sensor.py +++ b/homeassistant/components/habitica/sensor.py @@ -172,6 +172,7 @@ TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] = ( translation_key=HabitipySensorEntity.DAILIES, native_unit_of_measurement=UNIT_TASKS, value_fn=lambda tasks: [r for r in tasks if r.get("type") == "daily"], + entity_registry_enabled_default=False, ), HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.TODOS, @@ -180,6 +181,7 @@ TASK_SENSOR_DESCRIPTION: tuple[HabitipyTaskSensorEntityDescription, ...] 
= ( value_fn=lambda tasks: [ r for r in tasks if r.get("type") == "todo" and not r.get("completed") ], + entity_registry_enabled_default=False, ), HabitipyTaskSensorEntityDescription( key=HabitipySensorEntity.REWARDS, diff --git a/homeassistant/components/habitica/strings.json b/homeassistant/components/habitica/strings.json index 5696e6f9911..21d2622245c 100644 --- a/homeassistant/components/habitica/strings.json +++ b/homeassistant/components/habitica/strings.json @@ -100,7 +100,10 @@ }, "exceptions": { "delete_todos_failed": { - "message": "Unable to delete {count} Habitica to-do(s), please try again" + "message": "Unable to delete item from Habitica to-do list, please try again" + }, + "delete_completed_todos_failed": { + "message": "Unable to delete completed to-do items from Habitica to-do list, please try again" }, "move_todos_item_failed": { "message": "Unable to move the Habitica to-do to position {pos}, please try again" diff --git a/homeassistant/components/habitica/todo.py b/homeassistant/components/habitica/todo.py index ab458f9f59f..ae739d47262 100644 --- a/homeassistant/components/habitica/todo.py +++ b/homeassistant/components/habitica/todo.py @@ -75,16 +75,25 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): async def async_delete_todo_items(self, uids: list[str]) -> None: """Delete Habitica tasks.""" - for task_id in uids: + if len(uids) > 1 and self.entity_description.key is HabiticaTodoList.TODOS: try: - await self.coordinator.api.tasks[task_id].delete() + await self.coordinator.api.tasks.clearCompletedTodos.post() except ClientResponseError as e: raise ServiceValidationError( translation_domain=DOMAIN, - translation_key=f"delete_{self.entity_description.key}_failed", + translation_key="delete_completed_todos_failed", ) from e + else: + for task_id in uids: + try: + await self.coordinator.api.tasks[task_id].delete() + except ClientResponseError as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key=f"delete_{self.entity_description.key}_failed", + ) from e - await self.coordinator.async_refresh() + await self.coordinator.async_request_refresh() async def async_move_todo_item( self, uid: str, previous_uid: str | None = None @@ -112,9 +121,22 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): translation_key=f"move_{self.entity_description.key}_item_failed", translation_placeholders={"pos": str(pos)}, ) from e + else: + # move tasks in the coordinator until we have fresh data + tasks = self.coordinator.data.tasks + new_pos = ( + tasks.index(next(task for task in tasks if task["id"] == previous_uid)) + + 1 + if previous_uid + else 0 + ) + old_pos = tasks.index(next(task for task in tasks if task["id"] == uid)) + tasks.insert(new_pos, tasks.pop(old_pos)) + await self.coordinator.async_request_refresh() async def async_update_todo_item(self, item: TodoItem) -> None: """Update a Habitica todo.""" + refresh_required = False current_item = next( (task for task in (self.todo_items or []) if task.uid == item.uid), None, @@ -123,7 +145,6 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): if TYPE_CHECKING: assert item.uid assert current_item - assert item.due if ( self.entity_description.key is HabiticaTodoList.TODOS @@ -133,18 +154,24 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): else: date = None - try: - await self.coordinator.api.tasks[item.uid].put( - text=item.summary, - notes=item.description or "", - date=date, - ) - except ClientResponseError as e: - raise ServiceValidationError( - 
translation_domain=DOMAIN, - translation_key=f"update_{self.entity_description.key}_item_failed", - translation_placeholders={"name": item.summary or ""}, - ) from e + if ( + item.summary != current_item.summary + or item.description != current_item.description + or item.due != current_item.due + ): + try: + await self.coordinator.api.tasks[item.uid].put( + text=item.summary, + notes=item.description or "", + date=date, + ) + refresh_required = True + except ClientResponseError as e: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key=f"update_{self.entity_description.key}_item_failed", + translation_placeholders={"name": item.summary or ""}, + ) from e try: # Score up or down if item status changed @@ -155,6 +182,7 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): score_result = ( await self.coordinator.api.tasks[item.uid].score["up"].post() ) + refresh_required = True elif ( current_item.status is TodoItemStatus.COMPLETED and item.status == TodoItemStatus.NEEDS_ACTION @@ -162,6 +190,7 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): score_result = ( await self.coordinator.api.tasks[item.uid].score["down"].post() ) + refresh_required = True else: score_result = None @@ -180,8 +209,8 @@ class BaseHabiticaListEntity(HabiticaBase, TodoListEntity): persistent_notification.async_create( self.hass, message=msg, title="Habitica" ) - - await self.coordinator.async_refresh() + if refresh_required: + await self.coordinator.async_request_refresh() class HabiticaTodosListEntity(BaseHabiticaListEntity): @@ -245,7 +274,7 @@ class HabiticaTodosListEntity(BaseHabiticaListEntity): translation_placeholders={"name": item.summary or ""}, ) from e - await self.coordinator.async_refresh() + await self.coordinator.async_request_refresh() class HabiticaDailiesListEntity(BaseHabiticaListEntity): diff --git a/homeassistant/components/harmony/icons.json b/homeassistant/components/harmony/icons.json index f96fd985323..b6fe0d8c42e 100644 --- a/homeassistant/components/harmony/icons.json +++ b/homeassistant/components/harmony/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "sync": "mdi:sync", - "change_channel": "mdi:remote-tv" + "sync": { + "service": "mdi:sync" + }, + "change_channel": { + "service": "mdi:remote-tv" + } } } diff --git a/homeassistant/components/harmony/remote.py b/homeassistant/components/harmony/remote.py index d30aa475944..efbd4b2ac02 100644 --- a/homeassistant/components/harmony/remote.py +++ b/homeassistant/components/harmony/remote.py @@ -75,7 +75,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_SYNC, - {}, + None, "sync", ) platform.async_register_entity_service( diff --git a/homeassistant/components/hassio/icons.json b/homeassistant/components/hassio/icons.json index c55820b58f2..64f032d9f80 100644 --- a/homeassistant/components/hassio/icons.json +++ b/homeassistant/components/hassio/icons.json @@ -10,16 +10,38 @@ } }, "services": { - "addon_start": "mdi:play", - "addon_restart": "mdi:restart", - "addon_stdin": "mdi:console", - "addon_stop": "mdi:stop", - "addon_update": "mdi:update", - "host_reboot": "mdi:restart", - "host_shutdown": "mdi:power", - "backup_full": "mdi:content-save", - "backup_partial": "mdi:content-save", - "restore_full": "mdi:backup-restore", - "restore_partial": "mdi:backup-restore" + "addon_start": { + "service": "mdi:play" + }, + "addon_restart": { + "service": "mdi:restart" + }, + "addon_stdin": { + "service": "mdi:console" + }, + "addon_stop": { + "service": "mdi:stop" + }, + 
"addon_update": { + "service": "mdi:update" + }, + "host_reboot": { + "service": "mdi:restart" + }, + "host_shutdown": { + "service": "mdi:power" + }, + "backup_full": { + "service": "mdi:content-save" + }, + "backup_partial": { + "service": "mdi:content-save" + }, + "restore_full": { + "service": "mdi:backup-restore" + }, + "restore_partial": { + "service": "mdi:backup-restore" + } } } diff --git a/homeassistant/components/hdmi_cec/icons.json b/homeassistant/components/hdmi_cec/icons.json index 0bfcb98eea2..93647a6bb12 100644 --- a/homeassistant/components/hdmi_cec/icons.json +++ b/homeassistant/components/hdmi_cec/icons.json @@ -1,10 +1,22 @@ { "services": { - "power_on": "mdi:power", - "select_device": "mdi:television", - "send_command": "mdi:console", - "standby": "mdi:power-standby", - "update": "mdi:update", - "volume": "mdi:volume-high" + "power_on": { + "service": "mdi:power" + }, + "select_device": { + "service": "mdi:television" + }, + "send_command": { + "service": "mdi:console" + }, + "standby": { + "service": "mdi:power-standby" + }, + "update": { + "service": "mdi:update" + }, + "volume": { + "service": "mdi:volume-high" + } } } diff --git a/homeassistant/components/heos/config_flow.py b/homeassistant/components/heos/config_flow.py index b68d7d16717..57ed51a3c05 100644 --- a/homeassistant/components/heos/config_flow.py +++ b/homeassistant/components/heos/config_flow.py @@ -1,6 +1,6 @@ """Config flow to configure Heos.""" -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse from pyheos import Heos, HeosError @@ -43,15 +43,17 @@ class HeosFlowHandler(ConfigFlow, domain=DOMAIN): # Show selection form return self.async_show_form(step_id="user") - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Occurs when an entry is setup through config.""" - host = user_input[CONF_HOST] + host = import_data[CONF_HOST] # raise_on_progress is False here in case ssdp discovers # heos first which would block the import await self.async_set_unique_id(DOMAIN, raise_on_progress=False) return self.async_create_entry(title=format_title(host), data={CONF_HOST: host}) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Obtain host and validate connection.""" self.hass.data.setdefault(DATA_DISCOVERED_HOSTS, {}) # Only a single entry is needed for all devices diff --git a/homeassistant/components/heos/icons.json b/homeassistant/components/heos/icons.json index 69c434c8287..23c2c8faeaf 100644 --- a/homeassistant/components/heos/icons.json +++ b/homeassistant/components/heos/icons.json @@ -1,6 +1,10 @@ { "services": { - "sign_in": "mdi:login", - "sign_out": "mdi:logout" + "sign_in": { + "service": "mdi:login" + }, + "sign_out": { + "service": "mdi:logout" + } } } diff --git a/homeassistant/components/history_stats/icons.json b/homeassistant/components/history_stats/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/history_stats/icons.json +++ b/homeassistant/components/history_stats/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/history_stats/sensor.py b/homeassistant/components/history_stats/sensor.py index 99e953ff9dd..4558da8722c 100644 --- a/homeassistant/components/history_stats/sensor.py +++ 
b/homeassistant/components/history_stats/sensor.py @@ -103,10 +103,6 @@ async def async_setup_platform( name: str = config[CONF_NAME] unique_id: str | None = config.get(CONF_UNIQUE_ID) - for template in (start, end): - if template is not None: - template.hass = hass - history_stats = HistoryStats(hass, entity_id, entity_states, start, end, duration) coordinator = HistoryStatsUpdateCoordinator(hass, history_stats, name) await coordinator.async_refresh() diff --git a/homeassistant/components/hitron_coda/device_tracker.py b/homeassistant/components/hitron_coda/device_tracker.py index 68d93e9719d..61199e4b2f7 100644 --- a/homeassistant/components/hitron_coda/device_tracker.py +++ b/homeassistant/components/hitron_coda/device_tracker.py @@ -42,7 +42,7 @@ def get_scanner( return scanner if scanner.success_init else None -Device = namedtuple("Device", ["mac", "name"]) +Device = namedtuple("Device", ["mac", "name"]) # noqa: PYI024 class HitronCODADeviceScanner(DeviceScanner): diff --git a/homeassistant/components/hive/climate.py b/homeassistant/components/hive/climate.py index f4c8e678702..87d93eea95f 100644 --- a/homeassistant/components/hive/climate.py +++ b/homeassistant/components/hive/climate.py @@ -83,7 +83,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_BOOST_HEATING_OFF, - {}, + None, "async_heating_boost_off", ) diff --git a/homeassistant/components/hive/config_flow.py b/homeassistant/components/hive/config_flow.py index f8cb089834a..d6be2d1efab 100644 --- a/homeassistant/components/hive/config_flow.py +++ b/homeassistant/components/hive/config_flow.py @@ -163,11 +163,9 @@ class HiveFlowHandler(ConfigFlow, domain=DOMAIN): } return await self.async_step_user(data) - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import user.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) @staticmethod @callback diff --git a/homeassistant/components/hive/icons.json b/homeassistant/components/hive/icons.json index 2704317779c..e4c06556906 100644 --- a/homeassistant/components/hive/icons.json +++ b/homeassistant/components/hive/icons.json @@ -18,8 +18,14 @@ } }, "services": { - "boost_heating_on": "mdi:radiator", - "boost_heating_off": "mdi:radiator-off", - "boost_hot_water": "mdi:water-boiler" + "boost_heating_on": { + "service": "mdi:radiator" + }, + "boost_heating_off": { + "service": "mdi:radiator-off" + }, + "boost_hot_water": { + "service": "mdi:water-boiler" + } } } diff --git a/homeassistant/components/hlk_sw16/config_flow.py b/homeassistant/components/hlk_sw16/config_flow.py index b315d0daa78..8dd75561af3 100644 --- a/homeassistant/components/hlk_sw16/config_flow.py +++ b/homeassistant/components/hlk_sw16/config_flow.py @@ -1,11 +1,12 @@ """Config flow for HLK-SW16.""" import asyncio +from typing import Any from hlk_sw16 import create_hlk_sw16_connection import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant @@ -69,11 +70,13 @@ class SW16FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" - return await 
self.async_step_user(user_input) + return await self.async_step_user(import_data) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/holiday/manifest.json b/homeassistant/components/holiday/manifest.json index 0a3064450d4..0a2d98e71c5 100644 --- a/homeassistant/components/holiday/manifest.json +++ b/homeassistant/components/holiday/manifest.json @@ -5,5 +5,5 @@ "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/holiday", "iot_class": "local_polling", - "requirements": ["holidays==0.55", "babel==2.15.0"] + "requirements": ["holidays==0.56", "babel==2.15.0"] } diff --git a/homeassistant/components/home_connect/icons.json b/homeassistant/components/home_connect/icons.json index 48965cc554a..33617f5472e 100644 --- a/homeassistant/components/home_connect/icons.json +++ b/homeassistant/components/home_connect/icons.json @@ -1,11 +1,25 @@ { "services": { - "start_program": "mdi:play", - "select_program": "mdi:form-select", - "pause_program": "mdi:pause", - "resume_program": "mdi:play-pause", - "set_option_active": "mdi:gesture-tap", - "set_option_selected": "mdi:gesture-tap", - "change_setting": "mdi:cog" + "start_program": { + "service": "mdi:play" + }, + "select_program": { + "service": "mdi:form-select" + }, + "pause_program": { + "service": "mdi:pause" + }, + "resume_program": { + "service": "mdi:play-pause" + }, + "set_option_active": { + "service": "mdi:gesture-tap" + }, + "set_option_selected": { + "service": "mdi:gesture-tap" + }, + "change_setting": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/homeassistant/icons.json b/homeassistant/components/homeassistant/icons.json index ec4d5729918..f08fa8d969b 100644 --- a/homeassistant/components/homeassistant/icons.json +++ b/homeassistant/components/homeassistant/icons.json @@ -1,17 +1,43 @@ { "services": { - "check_config": "mdi:receipt-text-check", - "reload_core_config": "mdi:receipt-text-send", - "restart": "mdi:restart", - "set_location": "mdi:map-marker", - "stop": "mdi:stop", - "toggle": "mdi:toggle-switch", - "turn_on": "mdi:power-on", - "turn_off": "mdi:power-off", - "update_entity": "mdi:update", - "reload_custom_templates": "mdi:palette-swatch", - "reload_config_entry": "mdi:reload", - "save_persistent_states": "mdi:content-save", - "reload_all": "mdi:reload" + "check_config": { + "service": "mdi:receipt-text-check" + }, + "reload_core_config": { + "service": "mdi:receipt-text-send" + }, + "restart": { + "service": "mdi:restart" + }, + "set_location": { + "service": "mdi:map-marker" + }, + "stop": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:toggle-switch" + }, + "turn_on": { + "service": "mdi:power-on" + }, + "turn_off": { + "service": "mdi:power-off" + }, + "update_entity": { + "service": "mdi:update" + }, + "reload_custom_templates": { + "service": "mdi:palette-swatch" + }, + "reload_config_entry": { + "service": "mdi:reload" + }, + "save_persistent_states": { + "service": "mdi:content-save" + }, + "reload_all": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/homeassistant/triggers/event.py b/homeassistant/components/homeassistant/triggers/event.py index 98363de1f8d..985e4819b24 100644 --- a/homeassistant/components/homeassistant/triggers/event.py +++ b/homeassistant/components/homeassistant/triggers/event.py @@ -60,7 +60,6 
@@ async def async_attach_trigger( trigger_data = trigger_info["trigger_data"] variables = trigger_info["variables"] - template.attach(hass, config[CONF_EVENT_TYPE]) event_types = template.render_complex( config[CONF_EVENT_TYPE], variables, limited=True ) @@ -72,7 +71,6 @@ async def async_attach_trigger( event_data_items: ItemsView | None = None if CONF_EVENT_DATA in config: # Render the schema input - template.attach(hass, config[CONF_EVENT_DATA]) event_data = {} event_data.update( template.render_complex(config[CONF_EVENT_DATA], variables, limited=True) @@ -94,7 +92,6 @@ async def async_attach_trigger( event_context_items: ItemsView | None = None if CONF_EVENT_CONTEXT in config: # Render the schema input - template.attach(hass, config[CONF_EVENT_CONTEXT]) event_context = {} event_context.update( template.render_complex(config[CONF_EVENT_CONTEXT], variables, limited=True) diff --git a/homeassistant/components/homeassistant/triggers/numeric_state.py b/homeassistant/components/homeassistant/triggers/numeric_state.py index bc2c95675ad..dac250792ea 100644 --- a/homeassistant/components/homeassistant/triggers/numeric_state.py +++ b/homeassistant/components/homeassistant/triggers/numeric_state.py @@ -108,7 +108,6 @@ async def async_attach_trigger( below = config.get(CONF_BELOW) above = config.get(CONF_ABOVE) time_delta = config.get(CONF_FOR) - template.attach(hass, time_delta) value_template = config.get(CONF_VALUE_TEMPLATE) unsub_track_same: dict[str, Callable[[], None]] = {} armed_entities: set[str] = set() @@ -119,9 +118,6 @@ async def async_attach_trigger( trigger_data = trigger_info["trigger_data"] _variables = trigger_info["variables"] or {} - if value_template is not None: - value_template.hass = hass - def variables(entity_id: str) -> dict[str, Any]: """Return a dict with trigger variables.""" trigger_info = { diff --git a/homeassistant/components/homeassistant/triggers/state.py b/homeassistant/components/homeassistant/triggers/state.py index e0cbbf09610..53372cb479e 100644 --- a/homeassistant/components/homeassistant/triggers/state.py +++ b/homeassistant/components/homeassistant/triggers/state.py @@ -117,7 +117,6 @@ async def async_attach_trigger( match_to_state = process_state_match(MATCH_ALL) time_delta = config.get(CONF_FOR) - template.attach(hass, time_delta) # If neither CONF_FROM or CONF_TO are specified, # fire on all changes to the state or an attribute match_all = all( diff --git a/homeassistant/components/homeassistant_yellow/__init__.py b/homeassistant/components/homeassistant_yellow/__init__.py index 14c2de2c9a1..04abe5a1dca 100644 --- a/homeassistant/components/homeassistant_yellow/__init__.py +++ b/homeassistant/components/homeassistant_yellow/__init__.py @@ -2,18 +2,24 @@ from __future__ import annotations +import logging + from homeassistant.components.hassio import get_os_info, is_hassio from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( check_multi_pan_addon, - get_zigbee_socket, - multi_pan_addon_using_device, +) +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + guess_firmware_type, ) from homeassistant.config_entries import SOURCE_HARDWARE, ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady, HomeAssistantError from homeassistant.helpers import discovery_flow -from .const import RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA +from .const import FIRMWARE, RADIO_DEVICE, ZHA_HW_DISCOVERY_DATA + +_LOGGER = logging.getLogger(__name__) async def 
async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: @@ -27,34 +33,26 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # The hassio integration has not yet fetched data from the supervisor raise ConfigEntryNotReady - board: str | None - if (board := os_info.get("board")) is None or board != "yellow": + if os_info.get("board") != "yellow": # Not running on a Home Assistant Yellow, Home Assistant may have been migrated hass.async_create_task(hass.config_entries.async_remove(entry.entry_id)) return False - try: - await check_multi_pan_addon(hass) - except HomeAssistantError as err: - raise ConfigEntryNotReady from err + firmware = ApplicationType(entry.data[FIRMWARE]) - if not await multi_pan_addon_using_device(hass, RADIO_DEVICE): - hw_discovery_data = ZHA_HW_DISCOVERY_DATA - else: - hw_discovery_data = { - "name": "Yellow Multiprotocol", - "port": { - "path": get_zigbee_socket(), - }, - "radio_type": "ezsp", - } + if firmware is ApplicationType.CPC: + try: + await check_multi_pan_addon(hass) + except HomeAssistantError as err: + raise ConfigEntryNotReady from err - discovery_flow.async_create_flow( - hass, - "zha", - context={"source": SOURCE_HARDWARE}, - data=hw_discovery_data, - ) + if firmware is ApplicationType.EZSP: + discovery_flow.async_create_flow( + hass, + "zha", + context={"source": SOURCE_HARDWARE}, + data=ZHA_HW_DISCOVERY_DATA, + ) return True @@ -62,3 +60,39 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" return True + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate old entry.""" + + _LOGGER.debug( + "Migrating from version %s.%s", config_entry.version, config_entry.minor_version + ) + + if config_entry.version == 1: + if config_entry.minor_version == 1: + # Add-on startup with type service get started before Core, always (e.g. the + # Multi-Protocol add-on). Probing the firmware would interfere with the add-on, + # so we can't safely probe here. Instead, we must make an educated guess! 
+ firmware_guess = await guess_firmware_type(hass, RADIO_DEVICE) + + new_data = {**config_entry.data} + new_data[FIRMWARE] = firmware_guess.firmware_type.value + + hass.config_entries.async_update_entry( + config_entry, + data=new_data, + version=1, + minor_version=2, + ) + + _LOGGER.debug( + "Migration to version %s.%s successful", + config_entry.version, + config_entry.minor_version, + ) + + return True + + # This means the user has downgraded from a future version + return False diff --git a/homeassistant/components/homeassistant_yellow/config_flow.py b/homeassistant/components/homeassistant_yellow/config_flow.py index d2212a968db..1f4d150e49b 100644 --- a/homeassistant/components/homeassistant_yellow/config_flow.py +++ b/homeassistant/components/homeassistant_yellow/config_flow.py @@ -2,11 +2,13 @@ from __future__ import annotations +from abc import ABC, abstractmethod import asyncio import logging -from typing import Any +from typing import Any, final import aiohttp +from universal_silabs_flasher.const import ApplicationType import voluptuous as vol from homeassistant.components.hassio import ( @@ -15,12 +17,25 @@ from homeassistant.components.hassio import ( async_reboot_host, async_set_yellow_settings, ) -from homeassistant.components.homeassistant_hardware import silabs_multiprotocol_addon -from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + BaseFirmwareConfigFlow, + BaseFirmwareOptionsFlow, +) +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + OptionsFlowHandler as MultiprotocolOptionsFlowHandler, + SerialPortSettings as MultiprotocolSerialPortSettings, +) +from homeassistant.config_entries import ( + SOURCE_HARDWARE, + ConfigEntry, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.core import callback -from homeassistant.helpers import selector +from homeassistant.helpers import discovery_flow, selector -from .const import DOMAIN, ZHA_HW_DISCOVERY_DATA +from .const import DOMAIN, FIRMWARE, RADIO_DEVICE, ZHA_DOMAIN, ZHA_HW_DISCOVERY_DATA +from .hardware import BOARD_NAME _LOGGER = logging.getLogger(__name__) @@ -33,18 +48,30 @@ STEP_HW_SETTINGS_SCHEMA = vol.Schema( ) -class HomeAssistantYellowConfigFlow(ConfigFlow, domain=DOMAIN): +class HomeAssistantYellowConfigFlow(BaseFirmwareConfigFlow, domain=DOMAIN): """Handle a config flow for Home Assistant Yellow.""" VERSION = 1 + MINOR_VERSION = 2 + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate config flow.""" + super().__init__(*args, **kwargs) + + self._device = RADIO_DEVICE @staticmethod @callback def async_get_options_flow( config_entry: ConfigEntry, - ) -> HomeAssistantYellowOptionsFlow: + ) -> OptionsFlow: """Return the options flow.""" - return HomeAssistantYellowOptionsFlow(config_entry) + firmware_type = ApplicationType(config_entry.data[FIRMWARE]) + + if firmware_type is ApplicationType.CPC: + return HomeAssistantYellowMultiPanOptionsFlowHandler(config_entry) + + return HomeAssistantYellowOptionsFlowHandler(config_entry) async def async_step_system( self, data: dict[str, Any] | None = None @@ -53,30 +80,54 @@ class HomeAssistantYellowConfigFlow(ConfigFlow, domain=DOMAIN): if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Home Assistant Yellow", data={}) + # We do not actually use any portion of `BaseFirmwareConfigFlow` beyond this + await 
self._probe_firmware_type() + + # Kick off ZHA hardware discovery automatically if Zigbee firmware is running + if self._probed_firmware_type is ApplicationType.EZSP: + discovery_flow.async_create_flow( + self.hass, + ZHA_DOMAIN, + context={"source": SOURCE_HARDWARE}, + data=ZHA_HW_DISCOVERY_DATA, + ) + + return self._async_flow_finished() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + return self.async_create_entry( + title=BOARD_NAME, + data={ + # Assume the firmware type is EZSP if we cannot probe it + FIRMWARE: (self._probed_firmware_type or ApplicationType.EZSP).value, + }, + ) -class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandler): - """Handle an option flow for Home Assistant Yellow.""" +class BaseHomeAssistantYellowOptionsFlow(OptionsFlow, ABC): + """Base Home Assistant Yellow options flow shared between firmware and multi-PAN.""" _hw_settings: dict[str, bool] | None = None + @abstractmethod + async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: + """Show the main menu.""" + + @final + async def async_step_init( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Manage the options flow.""" + return await self.async_step_main_menu() + + @final async def async_step_on_supervisor( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle logic when on Supervisor host.""" return await self.async_step_main_menu() - async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: - """Show the main menu.""" - return self.async_show_menu( - step_id="main_menu", - menu_options=[ - "hardware_settings", - "multipan_settings", - ], - ) - async def async_step_hardware_settings( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -133,18 +184,36 @@ class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandl """Reboot later.""" return self.async_create_entry(data={}) + +class HomeAssistantYellowMultiPanOptionsFlowHandler( + BaseHomeAssistantYellowOptionsFlow, MultiprotocolOptionsFlowHandler +): + """Handle a multi-PAN options flow for Home Assistant Yellow.""" + + async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: + """Show the main menu.""" + return self.async_show_menu( + step_id="main_menu", + menu_options=[ + "hardware_settings", + "multipan_settings", + ], + ) + async def async_step_multipan_settings( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle multipan settings.""" - return await super().async_step_on_supervisor(user_input) + return await MultiprotocolOptionsFlowHandler.async_step_on_supervisor( + self, user_input + ) async def _async_serial_port_settings( self, - ) -> silabs_multiprotocol_addon.SerialPortSettings: + ) -> MultiprotocolSerialPortSettings: """Return the radio serial port settings.""" - return silabs_multiprotocol_addon.SerialPortSettings( - device="/dev/ttyAMA1", + return MultiprotocolSerialPortSettings( + device=RADIO_DEVICE, baudrate="115200", flow_control=True, ) @@ -163,4 +232,64 @@ class HomeAssistantYellowOptionsFlow(silabs_multiprotocol_addon.OptionsFlowHandl def _hardware_name(self) -> str: """Return the name of the hardware.""" - return "Home Assistant Yellow" + return BOARD_NAME + + async def async_step_flashing_complete( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Finish flashing and update the config entry.""" + self.hass.config_entries.async_update_entry( + 
entry=self.config_entry, + data={ + **self.config_entry.data, + FIRMWARE: ApplicationType.EZSP.value, + }, + ) + + return await super().async_step_flashing_complete(user_input) + + +class HomeAssistantYellowOptionsFlowHandler( + BaseHomeAssistantYellowOptionsFlow, BaseFirmwareOptionsFlow +): + """Handle a firmware options flow for Home Assistant Yellow.""" + + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Instantiate options flow.""" + super().__init__(*args, **kwargs) + + self._hardware_name = BOARD_NAME + self._device = RADIO_DEVICE + + # Regenerate the translation placeholders + self._get_translation_placeholders() + + async def async_step_main_menu(self, _: None = None) -> ConfigFlowResult: + """Show the main menu.""" + return self.async_show_menu( + step_id="main_menu", + menu_options=[ + "hardware_settings", + "firmware_settings", + ], + ) + + async def async_step_firmware_settings( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle firmware configuration settings.""" + return await super().async_step_pick_firmware() + + def _async_flow_finished(self) -> ConfigFlowResult: + """Create the config entry.""" + assert self._probed_firmware_type is not None + + self.hass.config_entries.async_update_entry( + entry=self.config_entry, + data={ + **self.config_entry.data, + FIRMWARE: self._probed_firmware_type.value, + }, + ) + + return self.async_create_entry(title="", data={}) diff --git a/homeassistant/components/homeassistant_yellow/const.py b/homeassistant/components/homeassistant_yellow/const.py index 8f1f9a4c2b8..79753ae9b9e 100644 --- a/homeassistant/components/homeassistant_yellow/const.py +++ b/homeassistant/components/homeassistant_yellow/const.py @@ -12,3 +12,6 @@ ZHA_HW_DISCOVERY_DATA = { }, "radio_type": "efr32", } + +FIRMWARE = "firmware" +ZHA_DOMAIN = "zha" diff --git a/homeassistant/components/homeassistant_yellow/strings.json b/homeassistant/components/homeassistant_yellow/strings.json index 95442d31500..fd3be3586b1 100644 --- a/homeassistant/components/homeassistant_yellow/strings.json +++ b/homeassistant/components/homeassistant_yellow/strings.json @@ -42,6 +42,7 @@ "main_menu": { "menu_options": { "hardware_settings": "[%key:component::homeassistant_yellow::options::step::hardware_settings::title%]", + "firmware_settings": "Switch between Zigbee or Thread firmware.", "multipan_settings": "Configure IEEE 802.15.4 radio multiprotocol support" } }, @@ -79,6 +80,46 @@ "start_flasher_addon": { "title": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::title%]", "description": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::step::start_flasher_addon::description%]" + }, + "pick_firmware": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::description%]", + "menu_options": { + "pick_firmware_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_thread%]", + "pick_firmware_zigbee": "[%key:component::homeassistant_hardware::firmware_picker::options::step::pick_firmware::menu_options::pick_firmware_zigbee%]" + } + }, + "install_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_flasher_addon::title%]", + "description": 
"[%key:component::homeassistant_hardware::firmware_picker::options::step::install_zigbee_flasher_addon::description%]" + }, + "run_zigbee_flasher_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::run_zigbee_flasher_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::run_zigbee_flasher_addon::description%]" + }, + "zigbee_flasher_failed": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_flasher_failed::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::zigbee_flasher_failed::description%]" + }, + "confirm_zigbee": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_zigbee::description%]" + }, + "install_otbr_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::install_otbr_addon::description%]" + }, + "start_otbr_addon": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::start_otbr_addon::description%]" + }, + "otbr_failed": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::otbr_failed::description%]" + }, + "confirm_otbr": { + "title": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::title%]", + "description": "[%key:component::homeassistant_hardware::firmware_picker::options::step::confirm_otbr::description%]" } }, "error": { @@ -93,11 +134,19 @@ "not_hassio": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::not_hassio%]", "read_hw_settings_error": "Failed to read hardware settings", "write_hw_settings_error": "Failed to write hardware settings", - "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]" + "zha_migration_failed": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::abort::zha_migration_failed%]", + "not_hassio_thread": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::not_hassio_thread%]", + "otbr_addon_already_running": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_addon_already_running%]", + "zha_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::zha_still_using_stick%]", + "otbr_still_using_stick": "[%key:component::homeassistant_hardware::firmware_picker::options::abort::otbr_still_using_stick%]" }, "progress": { "install_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::install_addon%]", - "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]" + "start_addon": "[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "start_otbr_addon": 
"[%key:component::homeassistant_hardware::silabs_multiprotocol_hardware::options::progress::start_addon%]", + "install_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::install_zigbee_flasher_addon%]", + "run_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::run_zigbee_flasher_addon%]", + "uninstall_zigbee_flasher_addon": "[%key:component::homeassistant_hardware::firmware_picker::options::progress::uninstall_zigbee_flasher_addon%]" } } } diff --git a/homeassistant/components/homekit/config_flow.py b/homeassistant/components/homekit/config_flow.py index 78979f73490..f88aa646f04 100644 --- a/homeassistant/components/homekit/config_flow.py +++ b/homeassistant/components/homekit/config_flow.py @@ -311,12 +311,12 @@ class HomeKitConfigFlow(ConfigFlow, domain=DOMAIN): title=f"{name}:{entry_data[CONF_PORT]}", data=entry_data ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from yaml.""" - if not self._async_is_unique_name_port(user_input): + if not self._async_is_unique_name_port(import_data): return self.async_abort(reason="port_name_in_use") return self.async_create_entry( - title=f"{user_input[CONF_NAME]}:{user_input[CONF_PORT]}", data=user_input + title=f"{import_data[CONF_NAME]}:{import_data[CONF_PORT]}", data=import_data ) @callback diff --git a/homeassistant/components/homekit/icons.json b/homeassistant/components/homekit/icons.json index fb0461eb5d8..7d8ddf131ef 100644 --- a/homeassistant/components/homekit/icons.json +++ b/homeassistant/components/homekit/icons.json @@ -1,7 +1,13 @@ { "services": { - "reload": "mdi:reload", - "reset_accessory": "mdi:cog-refresh", - "unpair": "mdi:link-variant-off" + "reload": { + "service": "mdi:reload" + }, + "reset_accessory": { + "service": "mdi:cog-refresh" + }, + "unpair": { + "service": "mdi:link-variant-off" + } } } diff --git a/homeassistant/components/homekit/manifest.json b/homeassistant/components/homekit/manifest.json index 17d1237e579..eebdc0026fd 100644 --- a/homeassistant/components/homekit/manifest.json +++ b/homeassistant/components/homekit/manifest.json @@ -10,7 +10,7 @@ "loggers": ["pyhap"], "requirements": [ "HAP-python==4.9.1", - "fnv-hash-fast==0.5.0", + "fnv-hash-fast==1.0.2", "PyQRCode==1.2.1", "base36==0.1.1" ], diff --git a/homeassistant/components/homekit/type_thermostats.py b/homeassistant/components/homekit/type_thermostats.py index 5dc520e8568..91bab2d470a 100644 --- a/homeassistant/components/homekit/type_thermostats.py +++ b/homeassistant/components/homekit/type_thermostats.py @@ -150,6 +150,8 @@ HC_HASS_TO_HOMEKIT_ACTION = { HVACAction.COOLING: HC_HEAT_COOL_COOL, HVACAction.DRYING: HC_HEAT_COOL_COOL, HVACAction.FAN: HC_HEAT_COOL_COOL, + HVACAction.PREHEATING: HC_HEAT_COOL_HEAT, + HVACAction.DEFROSTING: HC_HEAT_COOL_HEAT, } FAN_STATE_INACTIVE = 0 @@ -624,8 +626,9 @@ class Thermostat(HomeAccessory): # Set current operation mode for supported thermostats if hvac_action := attributes.get(ATTR_HVAC_ACTION): - homekit_hvac_action = HC_HASS_TO_HOMEKIT_ACTION[hvac_action] - self.char_current_heat_cool.set_value(homekit_hvac_action) + self.char_current_heat_cool.set_value( + HC_HASS_TO_HOMEKIT_ACTION.get(hvac_action, HC_HEAT_COOL_OFF) + ) # Update current temperature current_temp = _get_current_temperature(new_state, self._unit) diff --git 
a/homeassistant/components/homekit_controller/connection.py b/homeassistant/components/homekit_controller/connection.py index 4da907daf3e..934e7e883ae 100644 --- a/homeassistant/components/homekit_controller/connection.py +++ b/homeassistant/components/homekit_controller/connection.py @@ -154,6 +154,7 @@ class HKDevice: self._pending_subscribes: set[tuple[int, int]] = set() self._subscribe_timer: CALLBACK_TYPE | None = None self._load_platforms_lock = asyncio.Lock() + self._full_update_requested: bool = False @property def entity_map(self) -> Accessories: @@ -841,6 +842,7 @@ class HKDevice: async def async_request_update(self, now: datetime | None = None) -> None: """Request an debounced update from the accessory.""" + self._full_update_requested = True await self._debounced_update.async_call() async def async_update(self, now: datetime | None = None) -> None: @@ -849,7 +851,8 @@ class HKDevice: accessories = self.entity_map.accessories if ( - len(accessories) == 1 + not self._full_update_requested + and len(accessories) == 1 and self.available and not (to_poll - self.watchable_characteristics) and self.pairing.is_available @@ -879,6 +882,8 @@ class HKDevice: firmware_iid = accessory_info[CharacteristicsTypes.FIRMWARE_REVISION].iid to_poll = {(first_accessory.aid, firmware_iid)} + self._full_update_requested = False + if not to_poll: self.async_update_available_state() _LOGGER.debug( diff --git a/homeassistant/components/homekit_controller/cover.py b/homeassistant/components/homekit_controller/cover.py index d0944db38f8..0eebb72c988 100644 --- a/homeassistant/components/homekit_controller/cover.py +++ b/homeassistant/components/homekit_controller/cover.py @@ -214,34 +214,32 @@ class HomeKitWindowCover(HomeKitEntity, CoverEntity): @property def current_cover_tilt_position(self) -> int | None: """Return current position of cover tilt.""" - tilt_position = self.service.value(CharacteristicsTypes.VERTICAL_TILT_CURRENT) - if not tilt_position: - tilt_position = self.service.value( - CharacteristicsTypes.HORIZONTAL_TILT_CURRENT - ) - if tilt_position is None: - return None - # Recalculate to convert from arcdegree scale to percentage scale. if self.is_vertical_tilt: - scale = 0.9 - if ( - self.service[CharacteristicsTypes.VERTICAL_TILT_CURRENT].minValue == -90 - and self.service[CharacteristicsTypes.VERTICAL_TILT_CURRENT].maxValue - == 0 - ): - scale = -0.9 - tilt_position = int(tilt_position / scale) + char = self.service[CharacteristicsTypes.VERTICAL_TILT_CURRENT] elif self.is_horizontal_tilt: - scale = 0.9 - if ( - self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET].minValue - == -90 - and self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET].maxValue - == 0 - ): - scale = -0.9 - tilt_position = int(tilt_position / scale) - return tilt_position + char = self.service[CharacteristicsTypes.HORIZONTAL_TILT_CURRENT] + else: + return None + + # Recalculate tilt_position. Convert arc to percent scale based on min/max values. 
+ tilt_position = char.value + min_value = char.minValue + max_value = char.maxValue + total_range = int(max_value or 0) - int(min_value or 0) + + if ( + tilt_position is None + or min_value is None + or max_value is None + or total_range <= 0 + ): + return None + + # inverted scale + if min_value == -90 and max_value == 0: + return abs(int(100 / total_range * (tilt_position - max_value))) + # normal scale + return abs(int(100 / total_range * (tilt_position - min_value))) async def async_stop_cover(self, **kwargs: Any) -> None: """Send hold command.""" @@ -265,34 +263,32 @@ class HomeKitWindowCover(HomeKitEntity, CoverEntity): async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: """Move the cover tilt to a specific position.""" tilt_position = kwargs[ATTR_TILT_POSITION] + if self.is_vertical_tilt: - # Recalculate to convert from percentage scale to arcdegree scale. - scale = 0.9 - if ( - self.service[CharacteristicsTypes.VERTICAL_TILT_TARGET].minValue == -90 - and self.service[CharacteristicsTypes.VERTICAL_TILT_TARGET].maxValue - == 0 - ): - scale = -0.9 - tilt_position = int(tilt_position * scale) - await self.async_put_characteristics( - {CharacteristicsTypes.VERTICAL_TILT_TARGET: tilt_position} - ) + char = self.service[CharacteristicsTypes.VERTICAL_TILT_TARGET] elif self.is_horizontal_tilt: - # Recalculate to convert from percentage scale to arcdegree scale. - scale = 0.9 - if ( - self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET].minValue - == -90 - and self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET].maxValue - == 0 - ): - scale = -0.9 - tilt_position = int(tilt_position * scale) - await self.async_put_characteristics( - {CharacteristicsTypes.HORIZONTAL_TILT_TARGET: tilt_position} + char = self.service[CharacteristicsTypes.HORIZONTAL_TILT_TARGET] + + # Calculate tilt_position. Convert from 1-100 scale to arc degree scale respecting possible min/max Values. 
+ min_value = char.minValue + max_value = char.maxValue + if min_value is None or max_value is None: + raise ValueError( + "Entity does not provide minValue and maxValue for the tilt" ) + # inverted scale + if min_value == -90 and max_value == 0: + tilt_position = int( + tilt_position / 100 * (min_value - max_value) + max_value + ) + else: + tilt_position = int( + tilt_position / 100 * (max_value - min_value) + min_value + ) + + await self.async_put_characteristics({char.type: tilt_position}) + @property def extra_state_attributes(self) -> dict[str, Any]: """Return the optional state attributes.""" diff --git a/homeassistant/components/homematic/__init__.py b/homeassistant/components/homematic/__init__.py index 80345866b1f..f0fc2a40278 100644 --- a/homeassistant/components/homematic/__init__.py +++ b/homeassistant/components/homematic/__init__.py @@ -573,6 +573,8 @@ def _create_ha_id(name, channel, param, count): if count > 1 and param is not None: return f"{name} {channel} {param}" + raise ValueError(f"Unable to create unique id for count:{count} and param:{param}") + def _hm_event_handler(hass, interface, device, caller, attribute, value): """Handle all pyhomematic device events.""" @@ -621,3 +623,4 @@ def _device_from_servicecall(hass, service): for devices in hass.data[DATA_HOMEMATIC].devices.values(): if address in devices: return devices[address] + return None diff --git a/homeassistant/components/homematic/climate.py b/homeassistant/components/homematic/climate.py index bf1295df6be..2be28487cbb 100644 --- a/homeassistant/components/homematic/climate.py +++ b/homeassistant/components/homematic/climate.py @@ -125,6 +125,7 @@ class HMThermostat(HMDevice, ClimateEntity): for node in HM_HUMI_MAP: if node in self._data: return self._data[node] + return None @property def current_temperature(self): @@ -132,6 +133,7 @@ class HMThermostat(HMDevice, ClimateEntity): for node in HM_TEMP_MAP: if node in self._data: return self._data[node] + return None @property def target_temperature(self): diff --git a/homeassistant/components/homematic/icons.json b/homeassistant/components/homematic/icons.json index 998c9a385ba..9e58bbe3a90 100644 --- a/homeassistant/components/homematic/icons.json +++ b/homeassistant/components/homematic/icons.json @@ -1,10 +1,22 @@ { "services": { - "virtualkey": "mdi:keyboard", - "set_variable_value": "mdi:console", - "set_device_value": "mdi:television", - "reconnect": "mdi:wifi-refresh", - "set_install_mode": "mdi:cog", - "put_paramset": "mdi:cog" + "virtualkey": { + "service": "mdi:keyboard" + }, + "set_variable_value": { + "service": "mdi:console" + }, + "set_device_value": { + "service": "mdi:television" + }, + "reconnect": { + "service": "mdi:wifi-refresh" + }, + "set_install_mode": { + "service": "mdi:cog" + }, + "put_paramset": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/homematicip_cloud/config_flow.py b/homeassistant/components/homematicip_cloud/config_flow.py index c2277e16c79..a8b17a80aff 100644 --- a/homeassistant/components/homematicip_cloud/config_flow.py +++ b/homeassistant/components/homematicip_cloud/config_flow.py @@ -83,11 +83,11 @@ class HomematicipCloudFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="link", errors=errors) - async def async_step_import(self, import_info: dict[str, str]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, str]) -> ConfigFlowResult: """Import a new access point as a config entry.""" - hapid = import_info[HMIPC_HAPID].replace("-", 
"").upper() - authtoken = import_info[HMIPC_AUTHTOKEN] - name = import_info[HMIPC_NAME] + hapid = import_data[HMIPC_HAPID].replace("-", "").upper() + authtoken = import_data[HMIPC_AUTHTOKEN] + name = import_data[HMIPC_NAME] await self.async_set_unique_id(hapid) self._abort_if_unique_id_configured() diff --git a/homeassistant/components/homematicip_cloud/icons.json b/homeassistant/components/homematicip_cloud/icons.json index 2e9f6158c35..53a39d8213c 100644 --- a/homeassistant/components/homematicip_cloud/icons.json +++ b/homeassistant/components/homematicip_cloud/icons.json @@ -1,12 +1,31 @@ { "services": { - "activate_eco_mode_with_duration": "mdi:leaf", - "activate_eco_mode_with_period": "mdi:leaf", - "activate_vacation": "mdi:compass", - "deactivate_eco_mode": "mdi:leaf-off", - "deactivate_vacation": "mdi:compass-off", - "set_active_climate_profile": "mdi:home-thermometer", - "dump_hap_config": "mdi:database-export", - "reset_energy_counter": "mdi:reload" + "activate_eco_mode_with_duration": { + "service": "mdi:leaf" + }, + "activate_eco_mode_with_period": { + "service": "mdi:leaf" + }, + "activate_vacation": { + "service": "mdi:compass" + }, + "deactivate_eco_mode": { + "service": "mdi:leaf-off" + }, + "deactivate_vacation": { + "service": "mdi:compass-off" + }, + "set_active_climate_profile": { + "service": "mdi:home-thermometer" + }, + "dump_hap_config": { + "service": "mdi:database-export" + }, + "reset_energy_counter": { + "service": "mdi:reload" + }, + "set_home_cooling_mode": { + "service": "mdi:snowflake" + } } } diff --git a/homeassistant/components/homematicip_cloud/manifest.json b/homeassistant/components/homematicip_cloud/manifest.json index 024cb2d9f21..b3e7eb9a72a 100644 --- a/homeassistant/components/homematicip_cloud/manifest.json +++ b/homeassistant/components/homematicip_cloud/manifest.json @@ -7,5 +7,5 @@ "iot_class": "cloud_push", "loggers": ["homematicip"], "quality_scale": "silver", - "requirements": ["homematicip==1.1.1"] + "requirements": ["homematicip==1.1.2"] } diff --git a/homeassistant/components/homematicip_cloud/services.py b/homeassistant/components/homematicip_cloud/services.py index 37cda9e7683..4c04e4a858b 100644 --- a/homeassistant/components/homematicip_cloud/services.py +++ b/homeassistant/components/homematicip_cloud/services.py @@ -13,6 +13,7 @@ import voluptuous as vol from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE from homeassistant.core import HomeAssistant, ServiceCall +from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.config_validation import comp_entity_ids from homeassistant.helpers.service import ( @@ -31,6 +32,7 @@ ATTR_CONFIG_OUTPUT_FILE_PREFIX = "config_output_file_prefix" ATTR_CONFIG_OUTPUT_PATH = "config_output_path" ATTR_DURATION = "duration" ATTR_ENDTIME = "endtime" +ATTR_COOLING = "cooling" DEFAULT_CONFIG_FILE_PREFIX = "hmip-config" @@ -42,6 +44,7 @@ SERVICE_DEACTIVATE_VACATION = "deactivate_vacation" SERVICE_DUMP_HAP_CONFIG = "dump_hap_config" SERVICE_RESET_ENERGY_COUNTER = "reset_energy_counter" SERVICE_SET_ACTIVE_CLIMATE_PROFILE = "set_active_climate_profile" +SERVICE_SET_HOME_COOLING_MODE = "set_home_cooling_mode" HMIPC_SERVICES = [ SERVICE_ACTIVATE_ECO_MODE_WITH_DURATION, @@ -52,6 +55,7 @@ HMIPC_SERVICES = [ SERVICE_DUMP_HAP_CONFIG, SERVICE_RESET_ENERGY_COUNTER, SERVICE_SET_ACTIVE_CLIMATE_PROFILE, + SERVICE_SET_HOME_COOLING_MODE, ] SCHEMA_ACTIVATE_ECO_MODE_WITH_DURATION = vol.Schema( @@ -107,6 +111,13 @@ 
SCHEMA_RESET_ENERGY_COUNTER = vol.Schema( {vol.Required(ATTR_ENTITY_ID): comp_entity_ids} ) +SCHEMA_SET_HOME_COOLING_MODE = vol.Schema( + { + vol.Optional(ATTR_COOLING, default=True): cv.boolean, + vol.Optional(ATTR_ACCESSPOINT_ID): vol.All(str, vol.Length(min=24, max=24)), + } +) + async def async_setup_services(hass: HomeAssistant) -> None: """Set up the HomematicIP Cloud services.""" @@ -135,6 +146,8 @@ async def async_setup_services(hass: HomeAssistant) -> None: await _async_reset_energy_counter(hass, service) elif service_name == SERVICE_SET_ACTIVE_CLIMATE_PROFILE: await _set_active_climate_profile(hass, service) + elif service_name == SERVICE_SET_HOME_COOLING_MODE: + await _async_set_home_cooling_mode(hass, service) hass.services.async_register( domain=HMIPC_DOMAIN, @@ -194,6 +207,14 @@ async def async_setup_services(hass: HomeAssistant) -> None: schema=SCHEMA_RESET_ENERGY_COUNTER, ) + async_register_admin_service( + hass=hass, + domain=HMIPC_DOMAIN, + service=SERVICE_SET_HOME_COOLING_MODE, + service_func=async_call_hmipc_service, + schema=SCHEMA_SET_HOME_COOLING_MODE, + ) + async def async_unload_services(hass: HomeAssistant): """Unload HomematicIP Cloud services.""" @@ -324,10 +345,25 @@ async def _async_reset_energy_counter(hass: HomeAssistant, service: ServiceCall) await device.reset_energy_counter() +async def _async_set_home_cooling_mode(hass: HomeAssistant, service: ServiceCall): + """Service to set the cooling mode.""" + cooling = service.data[ATTR_COOLING] + + if hapid := service.data.get(ATTR_ACCESSPOINT_ID): + if home := _get_home(hass, hapid): + await home.set_cooling(cooling) + else: + for hap in hass.data[HMIPC_DOMAIN].values(): + await hap.home.set_cooling(cooling) + + def _get_home(hass: HomeAssistant, hapid: str) -> AsyncHome | None: """Return a HmIP home.""" if hap := hass.data[HMIPC_DOMAIN].get(hapid): return hap.home - _LOGGER.info("No matching access point found for access point id %s", hapid) - return None + raise ServiceValidationError( + translation_domain=HMIPC_DOMAIN, + translation_key="access_point_not_found", + translation_placeholders={"id": hapid}, + ) diff --git a/homeassistant/components/homematicip_cloud/services.yaml b/homeassistant/components/homematicip_cloud/services.yaml index 9e831339787..aced5c838a6 100644 --- a/homeassistant/components/homematicip_cloud/services.yaml +++ b/homeassistant/components/homematicip_cloud/services.yaml @@ -98,3 +98,14 @@ reset_energy_counter: example: switch.livingroom selector: text: + +set_home_cooling_mode: + fields: + cooling: + default: true + selector: + boolean: + accesspoint_id: + example: 3014xxxxxxxxxxxxxxxxxxxx + selector: + text: diff --git a/homeassistant/components/homematicip_cloud/strings.json b/homeassistant/components/homematicip_cloud/strings.json index 3795508d75d..a7c795c81f6 100644 --- a/homeassistant/components/homematicip_cloud/strings.json +++ b/homeassistant/components/homematicip_cloud/strings.json @@ -26,6 +26,11 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, + "exceptions": { + "access_point_not_found": { + "message": "No matching access point found for access point id {id}" + } + }, "services": { "activate_eco_mode_with_duration": { "name": "Activate eco mode with duration", @@ -134,6 +139,20 @@ "description": "The ID of the measuring entity. Use 'all' keyword to reset all energy counters." 
} } + }, + "set_home_cooling_mode": { + "name": "Set home cooling mode", + "description": "Set the heating/cooling mode for the entire home", + "fields": { + "accesspoint_id": { + "name": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_duration::fields::accesspoint_id::name%]", + "description": "[%key:component::homematicip_cloud::services::activate_eco_mode_with_duration::fields::accesspoint_id::description%]" + }, + "cooling": { + "name": "Cooling", + "description": "Enable for cooling mode, disable for heating mode" + } + } } } } diff --git a/homeassistant/components/homewizard/manifest.json b/homeassistant/components/homewizard/manifest.json index dbad91b1fb8..65672903eb8 100644 --- a/homeassistant/components/homewizard/manifest.json +++ b/homeassistant/components/homewizard/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_polling", "loggers": ["homewizard_energy"], "quality_scale": "platinum", - "requirements": ["python-homewizard-energy==v6.2.0"], + "requirements": ["python-homewizard-energy==v6.3.0"], "zeroconf": ["_hwenergy._tcp.local."] } diff --git a/homeassistant/components/homewizard/sensor.py b/homeassistant/components/homewizard/sensor.py index 1f77af110b0..c5cf0bc64c7 100644 --- a/homeassistant/components/homewizard/sensor.py +++ b/homeassistant/components/homewizard/sensor.py @@ -18,7 +18,6 @@ from homeassistant.components.sensor import ( from homeassistant.const import ( ATTR_VIA_DEVICE, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, EntityCategory, Platform, UnitOfApparentPower, @@ -27,6 +26,7 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, UnitOfVolume, ) from homeassistant.core import HomeAssistant @@ -404,7 +404,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( ), HomeWizardSensorEntityDescription( key="active_reactive_power_var", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -415,7 +415,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( key="active_reactive_power_l1_var", translation_key="active_reactive_power_phase_var", translation_placeholders={"phase": "1"}, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -426,7 +426,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( key="active_reactive_power_l2_var", translation_key="active_reactive_power_phase_var", translation_placeholders={"phase": "2"}, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, @@ -437,7 +437,7 @@ SENSORS: Final[tuple[HomeWizardSensorEntityDescription, ...]] = ( key="active_reactive_power_l3_var", translation_key="active_reactive_power_phase_var", translation_placeholders={"phase": "3"}, - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, device_class=SensorDeviceClass.REACTIVE_POWER, state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, diff --git 
a/homeassistant/components/homeworks/config_flow.py b/homeassistant/components/homeworks/config_flow.py index 9247670b40b..8e9c8e3b29a 100644 --- a/homeassistant/components/homeworks/config_flow.py +++ b/homeassistant/components/homeworks/config_flow.py @@ -588,12 +588,16 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): suggested_values = { CONF_HOST: entry.options[CONF_HOST], CONF_PORT: entry.options[CONF_PORT], + CONF_USERNAME: entry.data.get(CONF_USERNAME), + CONF_PASSWORD: entry.data.get(CONF_PASSWORD), } if user_input: suggested_values = { CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT], + CONF_USERNAME: user_input.get(CONF_USERNAME), + CONF_PASSWORD: user_input.get(CONF_PASSWORD), } try: await self._validate_edit_controller(user_input) @@ -652,7 +656,9 @@ class HomeworksConfigFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form( step_id="user", - data_schema=DATA_SCHEMA_ADD_CONTROLLER, + data_schema=self.add_suggested_values_to_schema( + DATA_SCHEMA_ADD_CONTROLLER, user_input + ), errors=errors, ) diff --git a/homeassistant/components/homeworks/icons.json b/homeassistant/components/homeworks/icons.json index f53b447d96e..fc39b2ef455 100644 --- a/homeassistant/components/homeworks/icons.json +++ b/homeassistant/components/homeworks/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_command": "mdi:console" + "send_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/hp_ilo/sensor.py b/homeassistant/components/hp_ilo/sensor.py index 85908a45af4..0eeb443cf2d 100644 --- a/homeassistant/components/hp_ilo/sensor.py +++ b/homeassistant/components/hp_ilo/sensor.py @@ -131,9 +131,6 @@ class HpIloSensor(SensorEntity): self._unit_of_measurement = unit_of_measurement self._ilo_function = SENSOR_TYPES[sensor_type][1] self.hp_ilo_data = hp_ilo_data - - if sensor_value_template is not None: - sensor_value_template.hass = hass self._sensor_value_template = sensor_value_template self._state = None diff --git a/homeassistant/components/html5/icons.json b/homeassistant/components/html5/icons.json index c3d6e27efda..d0a6013dd12 100644 --- a/homeassistant/components/html5/icons.json +++ b/homeassistant/components/html5/icons.json @@ -1,5 +1,7 @@ { "services": { - "dismiss": "mdi:bell-off" + "dismiss": { + "service": "mdi:bell-off" + } } } diff --git a/homeassistant/components/huawei_lte/icons.json b/homeassistant/components/huawei_lte/icons.json index d105702bf51..a338cc65ed4 100644 --- a/homeassistant/components/huawei_lte/icons.json +++ b/homeassistant/components/huawei_lte/icons.json @@ -53,7 +53,11 @@ } }, "services": { - "resume_integration": "mdi:play-pause", - "suspend_integration": "mdi:pause" + "resume_integration": { + "service": "mdi:play-pause" + }, + "suspend_integration": { + "service": "mdi:pause" + } } } diff --git a/homeassistant/components/hue/config_flow.py b/homeassistant/components/hue/config_flow.py index fb32f568ee1..e73ae8fe11d 100644 --- a/homeassistant/components/hue/config_flow.py +++ b/homeassistant/components/hue/config_flow.py @@ -258,7 +258,7 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): await self._async_handle_discovery_without_unique_id() return await self.async_step_link() - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a new bridge as a config entry. 
This flow is triggered by `async_setup` for both configured and @@ -268,9 +268,9 @@ class HueFlowHandler(ConfigFlow, domain=DOMAIN): This flow is also triggered by `async_step_discovery`. """ # Check if host exists, abort if so. - self._async_abort_entries_match({"host": import_info["host"]}) + self._async_abort_entries_match({"host": import_data["host"]}) - bridge = await self._get_bridge(import_info["host"]) + bridge = await self._get_bridge(import_data["host"]) if bridge is None: return self.async_abort(reason="cannot_connect") self.bridge = bridge diff --git a/homeassistant/components/hue/icons.json b/homeassistant/components/hue/icons.json index 9371ae5843e..31464308b0a 100644 --- a/homeassistant/components/hue/icons.json +++ b/homeassistant/components/hue/icons.json @@ -1,6 +1,10 @@ { "services": { - "hue_activate_scene": "mdi:palette", - "activate_scene": "mdi:palette" + "hue_activate_scene": { + "service": "mdi:palette" + }, + "activate_scene": { + "service": "mdi:palette" + } } } diff --git a/homeassistant/components/huisbaasje/config_flow.py b/homeassistant/components/huisbaasje/config_flow.py index ecf8cdbe431..43fbe839fa6 100644 --- a/homeassistant/components/huisbaasje/config_flow.py +++ b/homeassistant/components/huisbaasje/config_flow.py @@ -1,11 +1,12 @@ """Config flow for EnergyFlip integration.""" import logging +from typing import Any from energyflip import EnergyFlip, EnergyFlipConnectionException, EnergyFlipException import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.data_entry_flow import AbortFlow @@ -23,7 +24,9 @@ class EnergyFlipConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" if user_input is None: return await self._show_setup_form(user_input) diff --git a/homeassistant/components/humidifier/__init__.py b/homeassistant/components/humidifier/__init__.py index ce94eaaf5a0..37e2bd3e3ba 100644 --- a/homeassistant/components/humidifier/__init__.py +++ b/homeassistant/components/humidifier/__init__.py @@ -92,9 +92,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) await component.async_setup(config) - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") component.async_register_entity_service( SERVICE_SET_MODE, {vol.Required(ATTR_MODE): cv.string}, diff --git a/homeassistant/components/humidifier/icons.json b/homeassistant/components/humidifier/icons.json index 2c67f759195..15951df432d 100644 --- a/homeassistant/components/humidifier/icons.json +++ b/homeassistant/components/humidifier/icons.json @@ -33,10 +33,20 @@ } }, "services": { - "set_humidity": "mdi:water-percent", - "set_mode": "mdi:air-humidifier", - "toggle": "mdi:air-humidifier", - "turn_off": "mdi:air-humidifier-off", - "turn_on": "mdi:air-humidifier" + "set_humidity": { + 
"service": "mdi:water-percent" + }, + "set_mode": { + "service": "mdi:air-humidifier" + }, + "toggle": { + "service": "mdi:air-humidifier" + }, + "turn_off": { + "service": "mdi:air-humidifier-off" + }, + "turn_on": { + "service": "mdi:air-humidifier" + } } } diff --git a/homeassistant/components/hunterdouglas_powerview/__init__.py b/homeassistant/components/hunterdouglas_powerview/__init__.py index 6f63641b722..f8c7ac43b94 100644 --- a/homeassistant/components/hunterdouglas_powerview/__init__.py +++ b/homeassistant/components/hunterdouglas_powerview/__init__.py @@ -13,7 +13,6 @@ from homeassistant.const import CONF_API_VERSION, CONF_HOST, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv from .const import DOMAIN, HUB_EXCEPTIONS from .coordinator import PowerviewShadeUpdateCoordinator @@ -22,7 +21,6 @@ from .shade_data import PowerviewShadeData PARALLEL_UPDATES = 1 -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [ Platform.BUTTON, diff --git a/homeassistant/components/husqvarna_automower/button.py b/homeassistant/components/husqvarna_automower/button.py index a9747108393..810dd4df92d 100644 --- a/homeassistant/components/husqvarna_automower/button.py +++ b/homeassistant/components/husqvarna_automower/button.py @@ -25,7 +25,9 @@ async def async_setup_entry( """Set up button platform.""" coordinator = entry.runtime_data async_add_entities( - AutomowerButtonEntity(mower_id, coordinator) for mower_id in coordinator.data + AutomowerButtonEntity(mower_id, coordinator) + for mower_id in coordinator.data + if coordinator.data[mower_id].capabilities.can_confirm_error ) @@ -33,7 +35,6 @@ class AutomowerButtonEntity(AutomowerAvailableEntity, ButtonEntity): """Defining the AutomowerButtonEntity.""" _attr_translation_key = "confirm_error" - _attr_entity_registry_enabled_default = False def __init__( self, diff --git a/homeassistant/components/husqvarna_automower/icons.json b/homeassistant/components/husqvarna_automower/icons.json index 9dc1cbeb667..bcaf1826260 100644 --- a/homeassistant/components/husqvarna_automower/icons.json +++ b/homeassistant/components/husqvarna_automower/icons.json @@ -34,7 +34,11 @@ } }, "services": { - "override_schedule": "mdi:debug-step-over", - "override_schedule_work_area": "mdi:land-fields" + "override_schedule": { + "service": "mdi:debug-step-over" + }, + "override_schedule_work_area": { + "service": "mdi:land-fields" + } } } diff --git a/homeassistant/components/husqvarna_automower/manifest.json b/homeassistant/components/husqvarna_automower/manifest.json index bb03806e417..7326408e403 100644 --- a/homeassistant/components/husqvarna_automower/manifest.json +++ b/homeassistant/components/husqvarna_automower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/husqvarna_automower", "iot_class": "cloud_push", "loggers": ["aioautomower"], - "requirements": ["aioautomower==2024.7.3"] + "requirements": ["aioautomower==2024.8.0"] } diff --git a/homeassistant/components/hvv_departures/config_flow.py b/homeassistant/components/hvv_departures/config_flow.py index 0c909e2d8c1..a02796dbffb 100644 --- a/homeassistant/components/hvv_departures/config_flow.py +++ b/homeassistant/components/hvv_departures/config_flow.py @@ -9,7 +9,12 @@ from pygti.auth import GTI_DEFAULT_HOST from pygti.exceptions import CannotConnect, InvalidAuth import 
voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_HOST, CONF_OFFSET, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback from homeassistant.helpers import aiohttp_client @@ -44,13 +49,15 @@ class HVVDeparturesConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize component.""" - self.hub = None - self.data = None - self.stations = {} + self.hub: GTIHub | None = None + self.data: dict[str, Any] | None = None + self.stations: dict[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/hydrawise/binary_sensor.py b/homeassistant/components/hydrawise/binary_sensor.py index 0e00d237fae..9b6dcadf95f 100644 --- a/homeassistant/components/hydrawise/binary_sensor.py +++ b/homeassistant/components/hydrawise/binary_sensor.py @@ -110,7 +110,7 @@ async def async_setup_entry( ) async_add_entities(entities) platform = entity_platform.async_get_current_platform() - platform.async_register_entity_service(SERVICE_RESUME, {}, "resume") + platform.async_register_entity_service(SERVICE_RESUME, None, "resume") platform.async_register_entity_service( SERVICE_START_WATERING, SCHEMA_START_WATERING, "start_watering" ) diff --git a/homeassistant/components/hydrawise/config_flow.py b/homeassistant/components/hydrawise/config_flow.py index ab9ebbb065d..a5e7d616fcf 100644 --- a/homeassistant/components/hydrawise/config_flow.py +++ b/homeassistant/components/hydrawise/config_flow.py @@ -90,7 +90,7 @@ class HydrawiseConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth after updating config to username/password.""" self.reauth_entry = self.hass.config_entries.async_get_entry( diff --git a/homeassistant/components/hydrawise/icons.json b/homeassistant/components/hydrawise/icons.json index 1d1d349dbf9..5baf76454b7 100644 --- a/homeassistant/components/hydrawise/icons.json +++ b/homeassistant/components/hydrawise/icons.json @@ -31,8 +31,14 @@ } }, "services": { - "start_watering": "mdi:sprinkler-variant", - "suspend": "mdi:pause-circle-outline", - "resume": "mdi:play" + "start_watering": { + "service": "mdi:sprinkler-variant" + }, + "suspend": { + "service": "mdi:pause-circle-outline" + }, + "resume": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/hydrawise/manifest.json b/homeassistant/components/hydrawise/manifest.json index c6f4d7d8dcd..9b733cb73d0 100644 --- a/homeassistant/components/hydrawise/manifest.json +++ b/homeassistant/components/hydrawise/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/hydrawise", "iot_class": "cloud_polling", "loggers": ["pydrawise"], - "requirements": ["pydrawise==2024.6.4"] + "requirements": ["pydrawise==2024.8.0"] } diff --git a/homeassistant/components/ialarm/config_flow.py b/homeassistant/components/ialarm/config_flow.py index 08cb9868357..6df1b0f8290 100644 --- a/homeassistant/components/ialarm/config_flow.py +++ b/homeassistant/components/ialarm/config_flow.py @@ -1,11 +1,12 @@ """Config flow for Antifurto365 iAlarm 
integration.""" import logging +from typing import Any from pyialarm import IAlarm import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant @@ -31,7 +32,9 @@ class IAlarmConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} mac = None diff --git a/homeassistant/components/icloud/__init__.py b/homeassistant/components/icloud/__init__.py index 431a1abd2e1..5bdfd00dc60 100644 --- a/homeassistant/components/icloud/__init__.py +++ b/homeassistant/components/icloud/__init__.py @@ -69,8 +69,6 @@ SERVICE_SCHEMA_LOST_DEVICE = vol.Schema( } ) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up an iCloud account from a config entry.""" diff --git a/homeassistant/components/icloud/account.py b/homeassistant/components/icloud/account.py index 988073384f8..9536cd9ee5c 100644 --- a/homeassistant/components/icloud/account.py +++ b/homeassistant/components/icloud/account.py @@ -117,7 +117,7 @@ class IcloudAccount: if self.api.requires_2fa: # Trigger a new log in to ensure the user enters the 2FA code again. - raise PyiCloudFailedLoginException + raise PyiCloudFailedLoginException # noqa: TRY301 except PyiCloudFailedLoginException: self.api = None diff --git a/homeassistant/components/icloud/config_flow.py b/homeassistant/components/icloud/config_flow.py index 36fe880ec79..544f751dc0b 100644 --- a/homeassistant/components/icloud/config_flow.py +++ b/homeassistant/components/icloud/config_flow.py @@ -43,7 +43,7 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize iCloud config flow.""" self.api = None self._username = None @@ -55,8 +55,8 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): self._trusted_device = None self._verification_code = None - self._existing_entry_data = None - self._description_placeholders = None + self._existing_entry_data: dict[str, Any] | None = None + self._description_placeholders: dict[str, str] | None = None def _show_setup_form(self, user_input=None, errors=None, step_id="user"): """Show the setup form to the user.""" @@ -141,7 +141,7 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): getattr, self.api, "devices" ) if not devices: - raise PyiCloudNoDevicesException + raise PyiCloudNoDevicesException # noqa: TRY301 except (PyiCloudServiceNotActivatedException, PyiCloudNoDevicesException): _LOGGER.error("No device found in the iCloud account: %s", self._username) self.api = None @@ -164,11 +164,13 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): await self.hass.config_entries.async_reload(entry.entry_id) return self.async_abort(reason="reauth_successful") - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - errors = {} + errors: dict[str, str] = {} - icloud_dir = Store(self.hass, STORAGE_VERSION, STORAGE_KEY) + icloud_dir = Store[Any](self.hass, STORAGE_VERSION, STORAGE_KEY) if not os.path.exists(icloud_dir.path): await self.hass.async_add_executor_job(os.makedirs, 
icloud_dir.path) @@ -264,13 +266,13 @@ class IcloudFlowHandler(ConfigFlow, domain=DOMAIN): if not await self.hass.async_add_executor_job( self.api.validate_2fa_code, self._verification_code ): - raise PyiCloudException("The code you entered is not valid.") + raise PyiCloudException("The code you entered is not valid.") # noqa: TRY301 elif not await self.hass.async_add_executor_job( self.api.validate_verification_code, self._trusted_device, self._verification_code, ): - raise PyiCloudException("The code you entered is not valid.") + raise PyiCloudException("The code you entered is not valid.") # noqa: TRY301 except PyiCloudException as error: # Reset to the initial 2FA state to allow the user to retry _LOGGER.error("Failed to verify verification code: %s", error) diff --git a/homeassistant/components/icloud/icons.json b/homeassistant/components/icloud/icons.json index 4ed856aabc1..16280a063e3 100644 --- a/homeassistant/components/icloud/icons.json +++ b/homeassistant/components/icloud/icons.json @@ -1,8 +1,16 @@ { "services": { - "update": "mdi:update", - "play_sound": "mdi:speaker-wireless", - "display_message": "mdi:message-alert", - "lost_device": "mdi:devices" + "update": { + "service": "mdi:update" + }, + "play_sound": { + "service": "mdi:speaker-wireless" + }, + "display_message": { + "service": "mdi:message-alert" + }, + "lost_device": { + "service": "mdi:devices" + } } } diff --git a/homeassistant/components/idasen_desk/__init__.py b/homeassistant/components/idasen_desk/__init__.py index f0d8013cb50..56a377ac2df 100644 --- a/homeassistant/components/idasen_desk/__init__.py +++ b/homeassistant/components/idasen_desk/__init__.py @@ -54,7 +54,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: try: if not await coordinator.async_connect(): - raise ConfigEntryNotReady(f"Unable to connect to desk {address}") + raise ConfigEntryNotReady(f"Unable to connect to desk {address}") # noqa: TRY301 except (AuthFailedError, TimeoutError, BleakError, Exception) as ex: raise ConfigEntryNotReady(f"Unable to connect to desk {address}") from ex diff --git a/homeassistant/components/ifttt/icons.json b/homeassistant/components/ifttt/icons.json index b943478a70b..a90d76f664a 100644 --- a/homeassistant/components/ifttt/icons.json +++ b/homeassistant/components/ifttt/icons.json @@ -1,6 +1,10 @@ { "services": { - "push_alarm_state": "mdi:security", - "trigger": "mdi:play" + "push_alarm_state": { + "service": "mdi:security" + }, + "trigger": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/ihc/icons.json b/homeassistant/components/ihc/icons.json index 73aab5f80d8..3842d1a48a6 100644 --- a/homeassistant/components/ihc/icons.json +++ b/homeassistant/components/ihc/icons.json @@ -1,8 +1,16 @@ { "services": { - "set_runtime_value_bool": "mdi:toggle-switch", - "set_runtime_value_int": "mdi:numeric", - "set_runtime_value_float": "mdi:numeric", - "pulse": "mdi:pulse" + "set_runtime_value_bool": { + "service": "mdi:toggle-switch" + }, + "set_runtime_value_int": { + "service": "mdi:numeric" + }, + "set_runtime_value_float": { + "service": "mdi:numeric" + }, + "pulse": { + "service": "mdi:pulse" + } } } diff --git a/homeassistant/components/image_processing/icons.json b/homeassistant/components/image_processing/icons.json index b19d29c186d..ae95718e381 100644 --- a/homeassistant/components/image_processing/icons.json +++ b/homeassistant/components/image_processing/icons.json @@ -1,5 +1,7 @@ { "services": { - "scan": "mdi:qrcode-scan" + "scan": { + "service": 
"mdi:qrcode-scan" + } } } diff --git a/homeassistant/components/imap/icons.json b/homeassistant/components/imap/icons.json index 6672f9a4a7f..17a11d0fe22 100644 --- a/homeassistant/components/imap/icons.json +++ b/homeassistant/components/imap/icons.json @@ -10,9 +10,17 @@ } }, "services": { - "seen": "mdi:email-open-outline", - "move": "mdi:email-arrow-right-outline", - "delete": "mdi:trash-can-outline", - "fetch": "mdi:email-sync-outline" + "seen": { + "service": "mdi:email-open-outline" + }, + "move": { + "service": "mdi:email-arrow-right-outline" + }, + "delete": { + "service": "mdi:trash-can-outline" + }, + "fetch": { + "service": "mdi:email-sync-outline" + } } } diff --git a/homeassistant/components/influxdb/sensor.py b/homeassistant/components/influxdb/sensor.py index 03b6acb204c..cc601888f56 100644 --- a/homeassistant/components/influxdb/sensor.py +++ b/homeassistant/components/influxdb/sensor.py @@ -194,39 +194,30 @@ class InfluxSensor(SensorEntity): """Initialize the sensor.""" self._name = query.get(CONF_NAME) self._unit_of_measurement = query.get(CONF_UNIT_OF_MEASUREMENT) - value_template = query.get(CONF_VALUE_TEMPLATE) - if value_template is not None: - self._value_template = value_template - self._value_template.hass = hass - else: - self._value_template = None + self._value_template = query.get(CONF_VALUE_TEMPLATE) self._state = None self._hass = hass self._attr_unique_id = query.get(CONF_UNIQUE_ID) if query[CONF_LANGUAGE] == LANGUAGE_FLUX: - query_clause = query.get(CONF_QUERY) - query_clause.hass = hass self.data = InfluxFluxSensorData( influx, query.get(CONF_BUCKET), query.get(CONF_RANGE_START), query.get(CONF_RANGE_STOP), - query_clause, + query.get(CONF_QUERY), query.get(CONF_IMPORTS), query.get(CONF_GROUP_FUNCTION), ) else: - where_clause = query.get(CONF_WHERE) - where_clause.hass = hass self.data = InfluxQLSensorData( influx, query.get(CONF_DB_NAME), query.get(CONF_GROUP_FUNCTION), query.get(CONF_FIELD), query.get(CONF_MEASUREMENT_NAME), - where_clause, + query.get(CONF_WHERE), ) @property diff --git a/homeassistant/components/input_boolean/__init__.py b/homeassistant/components/input_boolean/__init__.py index 57165c5508a..54457ab2fb7 100644 --- a/homeassistant/components/input_boolean/__init__.py +++ b/homeassistant/components/input_boolean/__init__.py @@ -138,11 +138,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: schema=RELOAD_SERVICE_SCHEMA, ) - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") return True diff --git a/homeassistant/components/input_boolean/icons.json b/homeassistant/components/input_boolean/icons.json index dc595a60fba..088c9094b3f 100644 --- a/homeassistant/components/input_boolean/icons.json +++ b/homeassistant/components/input_boolean/icons.json @@ -8,9 +8,17 @@ } }, "services": { - "toggle": "mdi:toggle-switch", - "turn_off": "mdi:toggle-switch-off", - "turn_on": "mdi:toggle-switch", - "reload": "mdi:reload" + "toggle": { + "service": "mdi:toggle-switch" + }, + "turn_off": { + "service": "mdi:toggle-switch-off" + }, + "turn_on": { + "service": "mdi:toggle-switch" + }, + "reload": { + 
"service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_button/__init__.py b/homeassistant/components/input_button/__init__.py index 1488d80a1f5..6584b40fb55 100644 --- a/homeassistant/components/input_button/__init__.py +++ b/homeassistant/components/input_button/__init__.py @@ -123,7 +123,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: schema=RELOAD_SERVICE_SCHEMA, ) - component.async_register_entity_service(SERVICE_PRESS, {}, "_async_press_action") + component.async_register_entity_service(SERVICE_PRESS, None, "_async_press_action") return True diff --git a/homeassistant/components/input_button/icons.json b/homeassistant/components/input_button/icons.json index 226b8ede110..20d41b4934a 100644 --- a/homeassistant/components/input_button/icons.json +++ b/homeassistant/components/input_button/icons.json @@ -1,6 +1,10 @@ { "services": { - "press": "mdi:gesture-tap-button", - "reload": "mdi:reload" + "press": { + "service": "mdi:gesture-tap-button" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_datetime/__init__.py b/homeassistant/components/input_datetime/__init__.py index 5d2c1e7ff8d..dcc2865acad 100644 --- a/homeassistant/components/input_datetime/__init__.py +++ b/homeassistant/components/input_datetime/__init__.py @@ -176,14 +176,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( "set_datetime", vol.All( - vol.Schema( + cv.make_entity_service_schema( { vol.Optional(ATTR_DATE): cv.date, vol.Optional(ATTR_TIME): cv.time, vol.Optional(ATTR_DATETIME): cv.datetime, vol.Optional(ATTR_TIMESTAMP): vol.Coerce(float), }, - extra=vol.ALLOW_EXTRA, ), cv.has_at_least_one_key( ATTR_DATE, ATTR_TIME, ATTR_DATETIME, ATTR_TIMESTAMP diff --git a/homeassistant/components/input_datetime/icons.json b/homeassistant/components/input_datetime/icons.json index de899023cf2..f3676f02220 100644 --- a/homeassistant/components/input_datetime/icons.json +++ b/homeassistant/components/input_datetime/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_datetime": "mdi:calendar-clock", - "reload": "mdi:reload" + "set_datetime": { + "service": "mdi:calendar-clock" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_number/__init__.py b/homeassistant/components/input_number/__init__.py index f55ceabc6f0..d52bfedfe77 100644 --- a/homeassistant/components/input_number/__init__.py +++ b/homeassistant/components/input_number/__init__.py @@ -157,9 +157,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: "async_set_value", ) - component.async_register_entity_service(SERVICE_INCREMENT, {}, "async_increment") + component.async_register_entity_service(SERVICE_INCREMENT, None, "async_increment") - component.async_register_entity_service(SERVICE_DECREMENT, {}, "async_decrement") + component.async_register_entity_service(SERVICE_DECREMENT, None, "async_decrement") return True diff --git a/homeassistant/components/input_number/icons.json b/homeassistant/components/input_number/icons.json index d1423838491..9f90582308b 100644 --- a/homeassistant/components/input_number/icons.json +++ b/homeassistant/components/input_number/icons.json @@ -1,8 +1,16 @@ { "services": { - "decrement": "mdi:minus", - "increment": "mdi:plus", - "set_value": "mdi:numeric", - "reload": "mdi:reload" + "decrement": { + "service": "mdi:minus" + }, + "increment": { + "service": "mdi:plus" + }, + "set_value": { + "service": 
"mdi:numeric" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_select/__init__.py b/homeassistant/components/input_select/__init__.py index 44d2df02a92..6efe16240cb 100644 --- a/homeassistant/components/input_select/__init__.py +++ b/homeassistant/components/input_select/__init__.py @@ -183,13 +183,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_SELECT_FIRST, - {}, + None, InputSelect.async_first.__name__, ) component.async_register_entity_service( SERVICE_SELECT_LAST, - {}, + None, InputSelect.async_last.__name__, ) diff --git a/homeassistant/components/input_select/icons.json b/homeassistant/components/input_select/icons.json index 03b477ddb36..6ef5cfaf96a 100644 --- a/homeassistant/components/input_select/icons.json +++ b/homeassistant/components/input_select/icons.json @@ -1,11 +1,25 @@ { "services": { - "select_next": "mdi:skip-next", - "select_option": "mdi:check", - "select_previous": "mdi:skip-previous", - "select_first": "mdi:skip-backward", - "select_last": "mdi:skip-forward", - "set_options": "mdi:cog", - "reload": "mdi:reload" + "select_next": { + "service": "mdi:skip-next" + }, + "select_option": { + "service": "mdi:check" + }, + "select_previous": { + "service": "mdi:skip-previous" + }, + "select_first": { + "service": "mdi:skip-backward" + }, + "select_last": { + "service": "mdi:skip-forward" + }, + "set_options": { + "service": "mdi:cog" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/input_text/icons.json b/homeassistant/components/input_text/icons.json index 0190e4ffba2..8fca66668bc 100644 --- a/homeassistant/components/input_text/icons.json +++ b/homeassistant/components/input_text/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_value": "mdi:form-textbox", - "reload": "mdi:reload" + "set_value": { + "service": "mdi:form-textbox" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/insteon/__init__.py b/homeassistant/components/insteon/__init__.py index 0ec2434bc82..ff72f90a87e 100644 --- a/homeassistant/components/insteon/__init__.py +++ b/homeassistant/components/insteon/__init__.py @@ -10,8 +10,7 @@ from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from homeassistant.const import CONF_PLATFORM, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers import device_registry as dr from . 
import api from .const import ( @@ -36,8 +35,6 @@ from .utils import ( _LOGGER = logging.getLogger(__name__) OPTIONS = "options" -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_get_device_config(hass, config_entry): """Initiate the connection and services.""" @@ -77,11 +74,6 @@ async def close_insteon_connection(*args): await async_close() -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the Insteon platform.""" - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up an Insteon entry.""" diff --git a/homeassistant/components/insteon/api/config.py b/homeassistant/components/insteon/api/config.py index 8a617911d1e..88c062c3271 100644 --- a/homeassistant/components/insteon/api/config.py +++ b/homeassistant/components/insteon/api/config.py @@ -211,7 +211,7 @@ async def websocket_update_modem_config( """Get the schema for the modem configuration.""" config = msg["config"] config_entry = get_insteon_config_entry(hass) - is_connected = devices.modem.connected + is_connected = devices.modem is not None and devices.modem.connected if not await _async_connect(**config): connection.send_error( diff --git a/homeassistant/components/insteon/config_flow.py b/homeassistant/components/insteon/config_flow.py index baf06b13860..7a701db1b82 100644 --- a/homeassistant/components/insteon/config_flow.py +++ b/homeassistant/components/insteon/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from pyinsteon import async_connect @@ -54,7 +55,9 @@ class InsteonFlowHandler(ConfigFlow, domain=DOMAIN): _device_name: str | None = None discovered_conf: dict[str, str] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Init the config flow.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/insteon/icons.json b/homeassistant/components/insteon/icons.json index 4d015e13b0d..530006ca7d4 100644 --- a/homeassistant/components/insteon/icons.json +++ b/homeassistant/components/insteon/icons.json @@ -1,15 +1,37 @@ { "services": { - "add_all_link": "mdi:link-variant", - "delete_all_link": "mdi:link-variant-remove", - "load_all_link_database": "mdi:database", - "print_all_link_database": "mdi:database-export", - "print_im_all_link_database": "mdi:database-export", - "x10_all_units_off": "mdi:power-off", - "x10_all_lights_on": "mdi:lightbulb-on", - "x10_all_lights_off": "mdi:lightbulb-off", - "scene_on": "mdi:palette", - "scene_off": "mdi:palette-outline", - "add_default_links": "mdi:link-variant-plus" + "add_all_link": { + "service": "mdi:link-variant" + }, + "delete_all_link": { + "service": "mdi:link-variant-remove" + }, + "load_all_link_database": { + "service": "mdi:database" + }, + "print_all_link_database": { + "service": "mdi:database-export" + }, + "print_im_all_link_database": { + "service": "mdi:database-export" + }, + "x10_all_units_off": { + "service": "mdi:power-off" + }, + "x10_all_lights_on": { + "service": "mdi:lightbulb-on" + }, + "x10_all_lights_off": { + "service": "mdi:lightbulb-off" + }, + "scene_on": { + "service": "mdi:palette" + }, + "scene_off": { + "service": "mdi:palette-outline" + }, + "add_default_links": { + "service": "mdi:link-variant-plus" + } } } diff --git a/homeassistant/components/intellifire/__init__.py 
b/homeassistant/components/intellifire/__init__.py index 7af472c8745..7609398673b 100644 --- a/homeassistant/components/intellifire/__init__.py +++ b/homeassistant/components/intellifire/__init__.py @@ -2,15 +2,17 @@ from __future__ import annotations -from aiohttp import ClientConnectionError -from intellifire4py import IntellifireControlAsync -from intellifire4py.exceptions import LoginException -from intellifire4py.intellifire import IntellifireAPICloud, IntellifireAPILocal +import asyncio + +from intellifire4py import UnifiedFireplace +from intellifire4py.cloud_interface import IntelliFireCloudInterface +from intellifire4py.model import IntelliFireCommonFireplaceData from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( CONF_API_KEY, CONF_HOST, + CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME, Platform, @@ -18,7 +20,18 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from .const import CONF_USER_ID, DOMAIN, LOGGER +from .const import ( + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_USER_ID, + CONF_WEB_CLIENT_ID, + DOMAIN, + INIT_WAIT_TIME_SECONDS, + LOGGER, + STARTUP_TIMEOUT, +) from .coordinator import IntellifireDataUpdateCoordinator PLATFORMS = [ @@ -32,79 +45,114 @@ PLATFORMS = [ ] +def _construct_common_data(entry: ConfigEntry) -> IntelliFireCommonFireplaceData: + """Convert config entry data into IntelliFireCommonFireplaceData.""" + + return IntelliFireCommonFireplaceData( + auth_cookie=entry.data[CONF_AUTH_COOKIE], + user_id=entry.data[CONF_USER_ID], + web_client_id=entry.data[CONF_WEB_CLIENT_ID], + serial=entry.data[CONF_SERIAL], + api_key=entry.data[CONF_API_KEY], + ip_address=entry.data[CONF_IP_ADDRESS], + read_mode=entry.options[CONF_READ_MODE], + control_mode=entry.options[CONF_CONTROL_MODE], + ) + + +async def async_migrate_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: + """Migrate entries.""" + LOGGER.debug( + "Migrating configuration from version %s.%s", + config_entry.version, + config_entry.minor_version, + ) + + if config_entry.version == 1: + new = {**config_entry.data} + + if config_entry.minor_version < 2: + username = config_entry.data[CONF_USERNAME] + password = config_entry.data[CONF_PASSWORD] + + # Create a Cloud Interface + async with IntelliFireCloudInterface() as cloud_interface: + await cloud_interface.login_with_credentials( + username=username, password=password + ) + + new_data = cloud_interface.user_data.get_data_for_ip(new[CONF_HOST]) + + if not new_data: + raise ConfigEntryAuthFailed + new[CONF_API_KEY] = new_data.api_key + new[CONF_WEB_CLIENT_ID] = new_data.web_client_id + new[CONF_AUTH_COOKIE] = new_data.auth_cookie + + new[CONF_IP_ADDRESS] = new_data.ip_address + new[CONF_SERIAL] = new_data.serial + + hass.config_entries.async_update_entry( + config_entry, + data=new, + options={CONF_READ_MODE: "local", CONF_CONTROL_MODE: "local"}, + unique_id=new[CONF_SERIAL], + version=1, + minor_version=2, + ) + LOGGER.debug("Pseudo Migration %s successful", config_entry.version) + + return True + + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up IntelliFire from a config entry.""" - LOGGER.debug("Setting up config entry: %s", entry.unique_id) if CONF_USERNAME not in entry.data: - LOGGER.debug("Old config entry format detected: %s", entry.unique_id) + LOGGER.debug("Config entry without username detected: %s", entry.unique_id) 
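
For context on the async_migrate_entry hook added above: Home Assistant runs it during setup when a stored entry's version/minor_version is older than what the integration's config flow declares (the flow's MINOR_VERSION is bumped to 2 further down in this patch). A generic sketch of that minor-version migration pattern, with a hypothetical "new_key" standing in for the IntelliFire-specific fields:

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
    """Migrate config entries created before minor version 2."""
    if entry.version == 1 and entry.minor_version < 2:
        # Derive whatever the newer code expects; "new_key" is purely illustrative.
        new_data = {**entry.data, "new_key": "derived-value"}
        hass.config_entries.async_update_entry(
            entry, data=new_data, version=1, minor_version=2
        )
    # Returning False marks the migration (and therefore setup) as failed.
    return True
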
raise ConfigEntryAuthFailed - ift_control = IntellifireControlAsync( - fireplace_ip=entry.data[CONF_HOST], - ) try: - await ift_control.login( - username=entry.data[CONF_USERNAME], - password=entry.data[CONF_PASSWORD], + fireplace: UnifiedFireplace = ( + await UnifiedFireplace.build_fireplace_from_common( + _construct_common_data(entry) + ) ) - except (ConnectionError, ClientConnectionError) as err: - raise ConfigEntryNotReady from err - except LoginException as err: - raise ConfigEntryAuthFailed(err) from err - - finally: - await ift_control.close() - - # Extract API Key and User_ID from ift_control - # Eventually this will migrate to using IntellifireAPICloud - - if CONF_USER_ID not in entry.data or CONF_API_KEY not in entry.data: - LOGGER.info( - "Updating intellifire config entry for %s with api information", - entry.unique_id, - ) - cloud_api = IntellifireAPICloud() - await cloud_api.login( - username=entry.data[CONF_USERNAME], - password=entry.data[CONF_PASSWORD], - ) - api_key = cloud_api.get_fireplace_api_key() - user_id = cloud_api.get_user_id() - # Update data entry - hass.config_entries.async_update_entry( - entry, - data={ - **entry.data, - CONF_API_KEY: api_key, - CONF_USER_ID: user_id, - }, + LOGGER.debug("Waiting for Fireplace to Initialize") + await asyncio.wait_for( + _async_wait_for_initialization(fireplace), timeout=STARTUP_TIMEOUT ) + except TimeoutError as err: + raise ConfigEntryNotReady( + "Initialization of fireplace timed out after 10 minutes" + ) from err - else: - api_key = entry.data[CONF_API_KEY] - user_id = entry.data[CONF_USER_ID] - - # Instantiate local control - api = IntellifireAPILocal( - fireplace_ip=entry.data[CONF_HOST], - api_key=api_key, - user_id=user_id, + # Construct coordinator + data_update_coordinator = IntellifireDataUpdateCoordinator( + hass=hass, fireplace=fireplace ) - # Define the update coordinator - coordinator = IntellifireDataUpdateCoordinator( - hass=hass, - api=api, - ) + LOGGER.debug("Fireplace to Initialized - Awaiting first refresh") + await data_update_coordinator.async_config_entry_first_refresh() + + hass.data.setdefault(DOMAIN, {})[entry.entry_id] = data_update_coordinator - await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True +async def _async_wait_for_initialization( + fireplace: UnifiedFireplace, timeout=STARTUP_TIMEOUT +): + """Wait for a fireplace to be initialized.""" + while ( + fireplace.data.ipv4_address == "127.0.0.1" and fireplace.data.serial == "unset" + ): + LOGGER.debug(f"Waiting for fireplace to initialize [{fireplace.read_mode}]") + await asyncio.sleep(INIT_WAIT_TIME_SECONDS) + + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): diff --git a/homeassistant/components/intellifire/binary_sensor.py b/homeassistant/components/intellifire/binary_sensor.py index a1b8865c876..f0a5d84fa62 100644 --- a/homeassistant/components/intellifire/binary_sensor.py +++ b/homeassistant/components/intellifire/binary_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass -from intellifire4py import IntellifirePollData +from intellifire4py.model import IntelliFirePollData from homeassistant.components.binary_sensor import ( BinarySensorDeviceClass, @@ -26,7 +26,7 @@ from 
.entity import IntellifireEntity class IntellifireBinarySensorRequiredKeysMixin: """Mixin for required keys.""" - value_fn: Callable[[IntellifirePollData], bool] + value_fn: Callable[[IntelliFirePollData], bool] @dataclass(frozen=True) diff --git a/homeassistant/components/intellifire/climate.py b/homeassistant/components/intellifire/climate.py index ed4facffc67..4eddde5ff10 100644 --- a/homeassistant/components/intellifire/climate.py +++ b/homeassistant/components/intellifire/climate.py @@ -69,7 +69,7 @@ class IntellifireClimate(IntellifireEntity, ClimateEntity): super().__init__(coordinator, description) if coordinator.data.thermostat_on: - self.last_temp = coordinator.data.thermostat_setpoint_c + self.last_temp = int(coordinator.data.thermostat_setpoint_c) @property def hvac_mode(self) -> HVACMode: diff --git a/homeassistant/components/intellifire/config_flow.py b/homeassistant/components/intellifire/config_flow.py index 268fc6623d3..56f0d5ca6a5 100644 --- a/homeassistant/components/intellifire/config_flow.py +++ b/homeassistant/components/intellifire/config_flow.py @@ -7,16 +7,33 @@ from dataclasses import dataclass from typing import Any from aiohttp import ClientConnectionError -from intellifire4py import AsyncUDPFireplaceFinder -from intellifire4py.exceptions import LoginException -from intellifire4py.intellifire import IntellifireAPICloud, IntellifireAPILocal +from intellifire4py.cloud_interface import IntelliFireCloudInterface +from intellifire4py.exceptions import LoginError +from intellifire4py.local_api import IntelliFireAPILocal +from intellifire4py.model import IntelliFireCommonFireplaceData import voluptuous as vol from homeassistant.components.dhcp import DhcpServiceInfo -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult +from homeassistant.const import ( + CONF_API_KEY, + CONF_HOST, + CONF_IP_ADDRESS, + CONF_PASSWORD, + CONF_USERNAME, +) -from .const import CONF_USER_ID, DOMAIN, LOGGER +from .const import ( + API_MODE_LOCAL, + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_USER_ID, + CONF_WEB_CLIENT_ID, + DOMAIN, + LOGGER, +) STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_HOST): str}) @@ -31,17 +48,20 @@ class DiscoveredHostInfo: serial: str | None -async def validate_host_input(host: str, dhcp_mode: bool = False) -> str: +async def _async_poll_local_fireplace_for_serial( + host: str, dhcp_mode: bool = False +) -> str: """Validate the user input allows us to connect. Data has the keys from STEP_USER_DATA_SCHEMA with values provided by the user. 
""" LOGGER.debug("Instantiating IntellifireAPI with host: [%s]", host) - api = IntellifireAPILocal(fireplace_ip=host) + api = IntelliFireAPILocal(fireplace_ip=host) await api.poll(suppress_warnings=dhcp_mode) serial = api.data.serial LOGGER.debug("Found a fireplace: %s", serial) + # Return the serial number which will be used to calculate a unique ID for the device/sensors return serial @@ -50,239 +70,206 @@ class IntelliFireConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for IntelliFire.""" VERSION = 1 + MINOR_VERSION = 2 def __init__(self) -> None: """Initialize the Config Flow Handler.""" - self._host: str = "" - self._serial: str = "" - self._not_configured_hosts: list[DiscoveredHostInfo] = [] + + # DHCP Variables + self._dhcp_discovered_serial: str = "" # used only in discovery mode self._discovered_host: DiscoveredHostInfo + self._dhcp_mode = False + self._is_reauth = False + + self._not_configured_hosts: list[DiscoveredHostInfo] = [] self._reauth_needed: DiscoveredHostInfo - async def _find_fireplaces(self): - """Perform UDP discovery.""" - fireplace_finder = AsyncUDPFireplaceFinder() - discovered_hosts = await fireplace_finder.search_fireplace(timeout=12) - configured_hosts = { - entry.data[CONF_HOST] - for entry in self._async_current_entries(include_ignore=False) - if CONF_HOST in entry.data # CONF_HOST will be missing for ignored entries - } + self._configured_serials: list[str] = [] - self._not_configured_hosts = [ - DiscoveredHostInfo(ip, None) - for ip in discovered_hosts - if ip not in configured_hosts - ] - LOGGER.debug("Discovered Hosts: %s", discovered_hosts) - LOGGER.debug("Configured Hosts: %s", configured_hosts) - LOGGER.debug("Not Configured Hosts: %s", self._not_configured_hosts) - - async def validate_api_access_and_create_or_update( - self, *, host: str, username: str, password: str, serial: str - ): - """Validate username/password against api.""" - LOGGER.debug("Attempting login to iftapi with: %s", username) - - ift_cloud = IntellifireAPICloud() - await ift_cloud.login(username=username, password=password) - api_key = ift_cloud.get_fireplace_api_key() - user_id = ift_cloud.get_user_id() - - data = { - CONF_HOST: host, - CONF_PASSWORD: password, - CONF_USERNAME: username, - CONF_API_KEY: api_key, - CONF_USER_ID: user_id, - } - - # Update or Create - existing_entry = await self.async_set_unique_id(serial) - if existing_entry: - self.hass.config_entries.async_update_entry(existing_entry, data=data) - await self.hass.config_entries.async_reload(existing_entry.entry_id) - return self.async_abort(reason="reauth_successful") - return self.async_create_entry(title=f"Fireplace {serial}", data=data) - - async def async_step_api_config( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Configure API access.""" - - errors = {} - control_schema = vol.Schema( - { - vol.Required(CONF_USERNAME): str, - vol.Required(CONF_PASSWORD): str, - } - ) - - if user_input is not None: - control_schema = vol.Schema( - { - vol.Required( - CONF_USERNAME, default=user_input.get(CONF_USERNAME, "") - ): str, - vol.Required( - CONF_PASSWORD, default=user_input.get(CONF_PASSWORD, "") - ): str, - } - ) - - try: - return await self.validate_api_access_and_create_or_update( - host=self._host, - username=user_input[CONF_USERNAME], - password=user_input[CONF_PASSWORD], - serial=self._serial, - ) - - except (ConnectionError, ClientConnectionError): - errors["base"] = "iftapi_connect" - LOGGER.error( - "Could not connect to iftapi.net over https - verify 
connectivity" - ) - except LoginException: - errors["base"] = "api_error" - LOGGER.error("Invalid credentials for iftapi.net") - - return self.async_show_form( - step_id="api_config", errors=errors, data_schema=control_schema - ) - - async def _async_validate_ip_and_continue(self, host: str) -> ConfigFlowResult: - """Validate local config and continue.""" - self._async_abort_entries_match({CONF_HOST: host}) - self._serial = await validate_host_input(host) - await self.async_set_unique_id(self._serial, raise_on_progress=False) - self._abort_if_unique_id_configured(updates={CONF_HOST: host}) - # Store current data and jump to next stage - self._host = host - - return await self.async_step_api_config() - - async def async_step_manual_device_entry(self, user_input=None): - """Handle manual input of local IP configuration.""" - LOGGER.debug("STEP: manual_device_entry") - errors = {} - self._host = user_input.get(CONF_HOST) if user_input else None - if user_input is not None: - try: - return await self._async_validate_ip_and_continue(self._host) - except (ConnectionError, ClientConnectionError): - errors["base"] = "cannot_connect" - - return self.async_show_form( - step_id="manual_device_entry", - errors=errors, - data_schema=vol.Schema({vol.Required(CONF_HOST, default=self._host): str}), - ) - - async def async_step_pick_device( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Pick which device to configure.""" - errors = {} - LOGGER.debug("STEP: pick_device") - - if user_input is not None: - if user_input[CONF_HOST] == MANUAL_ENTRY_STRING: - return await self.async_step_manual_device_entry() - - try: - return await self._async_validate_ip_and_continue(user_input[CONF_HOST]) - except (ConnectionError, ClientConnectionError): - errors["base"] = "cannot_connect" - - return self.async_show_form( - step_id="pick_device", - errors=errors, - data_schema=vol.Schema( - { - vol.Required(CONF_HOST): vol.In( - [host.ip for host in self._not_configured_hosts] - + [MANUAL_ENTRY_STRING] - ) - } - ), - ) + # Define a cloud api interface we can use + self.cloud_api_interface = IntelliFireCloudInterface() async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Start the user flow.""" - # Launch fireplaces discovery - await self._find_fireplaces() - LOGGER.debug("STEP: user") - if self._not_configured_hosts: - LOGGER.debug("Running Step: pick_device") - return await self.async_step_pick_device() - LOGGER.debug("Running Step: manual_device_entry") - return await self.async_step_manual_device_entry() + current_entries = self._async_current_entries(include_ignore=False) + self._configured_serials = [ + entry.data[CONF_SERIAL] for entry in current_entries + ] + + return await self.async_step_cloud_api() + + async def async_step_cloud_api( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Authenticate against IFTAPI Cloud in order to see configured devices. + + Local control of IntelliFire devices requires that the user download the correct API KEY which is only available on the cloud. Cloud control of the devices requires the user has at least once authenticated against the cloud and a set of cookie variables have been stored locally. 
+ + """ + errors: dict[str, str] = {} + LOGGER.debug("STEP: cloud_api") + + if user_input is not None: + try: + async with self.cloud_api_interface as cloud_interface: + await cloud_interface.login_with_credentials( + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + ) + + # If login was successful pass username/password to next step + return await self.async_step_pick_cloud_device() + except LoginError: + errors["base"] = "api_error" + + return self.async_show_form( + step_id="cloud_api", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } + ), + ) + + async def async_step_pick_cloud_device( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Step to select a device from the cloud. + + We can only get here if we have logged in. If there is only one device available it will be auto-configured, + else the user will be given a choice to pick a device. + """ + errors: dict[str, str] = {} + LOGGER.debug( + f"STEP: pick_cloud_device: {user_input} - DHCP_MODE[{self._dhcp_mode}" + ) + + if self._dhcp_mode or user_input is not None: + if self._dhcp_mode: + serial = self._dhcp_discovered_serial + LOGGER.debug(f"DHCP Mode detected for serial [{serial}]") + if user_input is not None: + serial = user_input[CONF_SERIAL] + + # Run a unique ID Check prior to anything else + await self.async_set_unique_id(serial) + self._abort_if_unique_id_configured(updates={CONF_SERIAL: serial}) + + # If Serial is Good obtain fireplace and configure + fireplace = self.cloud_api_interface.user_data.get_data_for_serial(serial) + if fireplace: + return await self._async_create_config_entry_from_common_data( + fireplace=fireplace + ) + + # Parse User Data to see if we auto-configure or prompt for selection: + user_data = self.cloud_api_interface.user_data + + available_fireplaces: list[IntelliFireCommonFireplaceData] = [ + fp + for fp in user_data.fireplaces + if fp.serial not in self._configured_serials + ] + + # Abort if all devices have been configured + if not available_fireplaces: + return self.async_abort(reason="no_available_devices") + + # If there is a single fireplace configure it + if len(available_fireplaces) == 1: + if self._is_reauth: + reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self._async_create_config_entry_from_common_data( + fireplace=available_fireplaces[0], existing_entry=reauth_entry + ) + + return await self._async_create_config_entry_from_common_data( + fireplace=available_fireplaces[0] + ) + + return self.async_show_form( + step_id="pick_cloud_device", + errors=errors, + data_schema=vol.Schema( + { + vol.Required(CONF_SERIAL): vol.In( + [fp.serial for fp in available_fireplaces] + ) + } + ), + ) + + async def _async_create_config_entry_from_common_data( + self, + fireplace: IntelliFireCommonFireplaceData, + existing_entry: ConfigEntry | None = None, + ) -> ConfigFlowResult: + """Construct a config entry based on an object of IntelliFireCommonFireplaceData.""" + + data = { + CONF_IP_ADDRESS: fireplace.ip_address, + CONF_API_KEY: fireplace.api_key, + CONF_SERIAL: fireplace.serial, + CONF_AUTH_COOKIE: fireplace.auth_cookie, + CONF_WEB_CLIENT_ID: fireplace.web_client_id, + CONF_USER_ID: fireplace.user_id, + CONF_USERNAME: self.cloud_api_interface.user_data.username, + CONF_PASSWORD: self.cloud_api_interface.user_data.password, + } + + options = {CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_LOCAL} + + if 
existing_entry: + return self.async_update_reload_and_abort( + existing_entry, data=data, options=options + ) + return self.async_create_entry( + title=f"Fireplace {fireplace.serial}", data=data, options=options + ) async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" LOGGER.debug("STEP: reauth") + self._is_reauth = True entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - assert entry - assert entry.unique_id # populate the expected vars - self._serial = entry.unique_id - self._host = entry.data[CONF_HOST] + self._dhcp_discovered_serial = entry.data[CONF_SERIAL] # type: ignore[union-attr] - placeholders = {CONF_HOST: self._host, "serial": self._serial} + placeholders = {"serial": self._dhcp_discovered_serial} self.context["title_placeholders"] = placeholders - return await self.async_step_api_config() + + return await self.async_step_cloud_api() async def async_step_dhcp( self, discovery_info: DhcpServiceInfo ) -> ConfigFlowResult: """Handle DHCP Discovery.""" + self._dhcp_mode = True # Run validation logic on ip - host = discovery_info.ip - LOGGER.debug("STEP: dhcp for host %s", host) + ip_address = discovery_info.ip + LOGGER.debug("STEP: dhcp for ip_address %s", ip_address) - self._async_abort_entries_match({CONF_HOST: host}) + self._async_abort_entries_match({CONF_IP_ADDRESS: ip_address}) try: - self._serial = await validate_host_input(host, dhcp_mode=True) + self._dhcp_discovered_serial = await _async_poll_local_fireplace_for_serial( + ip_address, dhcp_mode=True + ) except (ConnectionError, ClientConnectionError): LOGGER.debug( - "DHCP Discovery has determined %s is not an IntelliFire device", host + "DHCP Discovery has determined %s is not an IntelliFire device", + ip_address, ) return self.async_abort(reason="not_intellifire_device") - await self.async_set_unique_id(self._serial) - self._abort_if_unique_id_configured(updates={CONF_HOST: host}) - self._discovered_host = DiscoveredHostInfo(ip=host, serial=self._serial) - - placeholders = {CONF_HOST: host, "serial": self._serial} - self.context["title_placeholders"] = placeholders - self._set_confirm_only() - - return await self.async_step_dhcp_confirm() - - async def async_step_dhcp_confirm(self, user_input=None): - """Attempt to confirm.""" - - LOGGER.debug("STEP: dhcp_confirm") - # Add the hosts one by one - host = self._discovered_host.ip - serial = self._discovered_host.serial - - if user_input is None: - # Show the confirmation dialog - return self.async_show_form( - step_id="dhcp_confirm", - description_placeholders={CONF_HOST: host, "serial": serial}, - ) - - return self.async_create_entry( - title=f"Fireplace {serial}", - data={CONF_HOST: host}, - ) + return await self.async_step_cloud_api() diff --git a/homeassistant/components/intellifire/const.py b/homeassistant/components/intellifire/const.py index 5c8af1eefe9..f194eeaf4e2 100644 --- a/homeassistant/components/intellifire/const.py +++ b/homeassistant/components/intellifire/const.py @@ -5,11 +5,22 @@ from __future__ import annotations import logging DOMAIN = "intellifire" - -CONF_USER_ID = "user_id" - LOGGER = logging.getLogger(__package__) +DEFAULT_THERMOSTAT_TEMP = 21 + +CONF_USER_ID = "user_id" # part of the cloud cookie +CONF_WEB_CLIENT_ID = "web_client_id" # part of the cloud cookie +CONF_AUTH_COOKIE = "auth_cookie" # part of the cloud cookie CONF_SERIAL = "serial" +CONF_READ_MODE = "cloud_read" +CONF_CONTROL_MODE = "cloud_control" -DEFAULT_THERMOSTAT_TEMP 
= 21 + +API_MODE_LOCAL = "local" +API_MODE_CLOUD = "cloud" + + +STARTUP_TIMEOUT = 600 + +INIT_WAIT_TIME_SECONDS = 10 diff --git a/homeassistant/components/intellifire/coordinator.py b/homeassistant/components/intellifire/coordinator.py index 0a46ff61435..b4f03f4b5c8 100644 --- a/homeassistant/components/intellifire/coordinator.py +++ b/homeassistant/components/intellifire/coordinator.py @@ -2,27 +2,27 @@ from __future__ import annotations -import asyncio from datetime import timedelta -from aiohttp import ClientConnectionError -from intellifire4py import IntellifirePollData -from intellifire4py.intellifire import IntellifireAPILocal +from intellifire4py import UnifiedFireplace +from intellifire4py.control import IntelliFireController +from intellifire4py.model import IntelliFirePollData +from intellifire4py.read import IntelliFireDataProvider from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import DOMAIN, LOGGER -class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData]): +class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntelliFirePollData]): """Class to manage the polling of the fireplace API.""" def __init__( self, hass: HomeAssistant, - api: IntellifireAPILocal, + fireplace: UnifiedFireplace, ) -> None: """Initialize the Coordinator.""" super().__init__( @@ -31,36 +31,21 @@ class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData name=DOMAIN, update_interval=timedelta(seconds=15), ) - self._api = api - async def _async_update_data(self) -> IntellifirePollData: - if not self._api.is_polling_in_background: - LOGGER.info("Starting Intellifire Background Polling Loop") - await self._api.start_background_polling() - - # Don't return uninitialized poll data - async with asyncio.timeout(15): - try: - await self._api.poll() - except (ConnectionError, ClientConnectionError) as exception: - raise UpdateFailed from exception - - LOGGER.debug("Failure Count %d", self._api.failed_poll_attempts) - if self._api.failed_poll_attempts > 10: - LOGGER.debug("Too many polling errors - raising exception") - raise UpdateFailed - - return self._api.data + self.fireplace = fireplace @property - def read_api(self) -> IntellifireAPILocal: + def read_api(self) -> IntelliFireDataProvider: """Return the Status API pointer.""" - return self._api + return self.fireplace.read_api @property - def control_api(self) -> IntellifireAPILocal: + def control_api(self) -> IntelliFireController: """Return the control API.""" - return self._api + return self.fireplace.control_api + + async def _async_update_data(self) -> IntelliFirePollData: + return self.fireplace.data @property def device_info(self) -> DeviceInfo: @@ -69,7 +54,6 @@ class IntellifireDataUpdateCoordinator(DataUpdateCoordinator[IntellifirePollData manufacturer="Hearth and Home", model="IFT-WFM", name="IntelliFire", - identifiers={("IntelliFire", f"{self.read_api.data.serial}]")}, - sw_version=self.read_api.data.fw_ver_str, - configuration_url=f"http://{self._api.fireplace_ip}/poll", + identifiers={("IntelliFire", str(self.fireplace.serial))}, + configuration_url=f"http://{self.fireplace.ip_address}/poll", ) diff --git a/homeassistant/components/intellifire/entity.py b/homeassistant/components/intellifire/entity.py index 3b35c9dabd8..571c4717ac2 100644 --- 
a/homeassistant/components/intellifire/entity.py +++ b/homeassistant/components/intellifire/entity.py @@ -9,7 +9,7 @@ from . import IntellifireDataUpdateCoordinator class IntellifireEntity(CoordinatorEntity[IntellifireDataUpdateCoordinator]): - """Define a generic class for Intellifire entities.""" + """Define a generic class for IntelliFire entities.""" _attr_attribution = "Data provided by unpublished Intellifire API" _attr_has_entity_name = True @@ -22,6 +22,8 @@ class IntellifireEntity(CoordinatorEntity[IntellifireDataUpdateCoordinator]): """Class initializer.""" super().__init__(coordinator=coordinator) self.entity_description = description - self._attr_unique_id = f"{description.key}_{coordinator.read_api.data.serial}" + self._attr_unique_id = f"{description.key}_{coordinator.fireplace.serial}" + self.identifiers = ({("IntelliFire", f"{coordinator.fireplace.serial}]")},) + # Configure the Device Info self._attr_device_info = self.coordinator.device_info diff --git a/homeassistant/components/intellifire/fan.py b/homeassistant/components/intellifire/fan.py index f68827b0a56..dc2fc279a5d 100644 --- a/homeassistant/components/intellifire/fan.py +++ b/homeassistant/components/intellifire/fan.py @@ -7,7 +7,8 @@ from dataclasses import dataclass import math from typing import Any -from intellifire4py import IntellifireControlAsync, IntellifirePollData +from intellifire4py.control import IntelliFireController +from intellifire4py.model import IntelliFirePollData from homeassistant.components.fan import ( FanEntity, @@ -31,8 +32,8 @@ from .entity import IntellifireEntity class IntellifireFanRequiredKeysMixin: """Required keys for fan entity.""" - set_fn: Callable[[IntellifireControlAsync, int], Awaitable] - value_fn: Callable[[IntellifirePollData], bool] + set_fn: Callable[[IntelliFireController, int], Awaitable] + value_fn: Callable[[IntelliFirePollData], int] speed_range: tuple[int, int] @@ -91,7 +92,8 @@ class IntellifireFan(IntellifireEntity, FanEntity): def percentage(self) -> int | None: """Return fan percentage.""" return ranged_value_to_percentage( - self.entity_description.speed_range, self.coordinator.read_api.data.fanspeed + self.entity_description.speed_range, + self.coordinator.read_api.data.fanspeed, ) @property diff --git a/homeassistant/components/intellifire/light.py b/homeassistant/components/intellifire/light.py index a7f2befaf33..5f25b5de823 100644 --- a/homeassistant/components/intellifire/light.py +++ b/homeassistant/components/intellifire/light.py @@ -6,7 +6,8 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import Any -from intellifire4py import IntellifireControlAsync, IntellifirePollData +from intellifire4py.control import IntelliFireController +from intellifire4py.model import IntelliFirePollData from homeassistant.components.light import ( ATTR_BRIGHTNESS, @@ -27,8 +28,8 @@ from .entity import IntellifireEntity class IntellifireLightRequiredKeysMixin: """Required keys for fan entity.""" - set_fn: Callable[[IntellifireControlAsync, int], Awaitable] - value_fn: Callable[[IntellifirePollData], bool] + set_fn: Callable[[IntelliFireController, int], Awaitable] + value_fn: Callable[[IntelliFirePollData], int] @dataclass(frozen=True) @@ -56,7 +57,7 @@ class IntellifireLight(IntellifireEntity, LightEntity): _attr_supported_color_modes = {ColorMode.BRIGHTNESS} @property - def brightness(self): + def brightness(self) -> int: """Return the current brightness 0-255.""" return 85 * 
self.entity_description.value_fn(self.coordinator.read_api.data) diff --git a/homeassistant/components/intellifire/manifest.json b/homeassistant/components/intellifire/manifest.json index 90d41fcffe7..e3ee663e8fe 100644 --- a/homeassistant/components/intellifire/manifest.json +++ b/homeassistant/components/intellifire/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/intellifire", "iot_class": "local_polling", "loggers": ["intellifire4py"], - "requirements": ["intellifire4py==2.2.2"] + "requirements": ["intellifire4py==4.1.9"] } diff --git a/homeassistant/components/intellifire/sensor.py b/homeassistant/components/intellifire/sensor.py index dd3eef9c9b4..eaff89d08e7 100644 --- a/homeassistant/components/intellifire/sensor.py +++ b/homeassistant/components/intellifire/sensor.py @@ -6,8 +6,6 @@ from collections.abc import Callable from dataclasses import dataclass from datetime import datetime, timedelta -from intellifire4py import IntellifirePollData - from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -29,7 +27,9 @@ from .entity import IntellifireEntity class IntellifireSensorRequiredKeysMixin: """Mixin for required keys.""" - value_fn: Callable[[IntellifirePollData], int | str | datetime | None] + value_fn: Callable[ + [IntellifireDataUpdateCoordinator], int | str | datetime | float | None + ] @dataclass(frozen=True) @@ -40,16 +40,29 @@ class IntellifireSensorEntityDescription( """Describes a sensor entity.""" -def _time_remaining_to_timestamp(data: IntellifirePollData) -> datetime | None: +def _time_remaining_to_timestamp( + coordinator: IntellifireDataUpdateCoordinator, +) -> datetime | None: """Define a sensor that takes into account timezone.""" - if not (seconds_offset := data.timeremaining_s): + if not (seconds_offset := coordinator.data.timeremaining_s): return None return utcnow() + timedelta(seconds=seconds_offset) -def _downtime_to_timestamp(data: IntellifirePollData) -> datetime | None: +def _downtime_to_timestamp( + coordinator: IntellifireDataUpdateCoordinator, +) -> datetime | None: """Define a sensor that takes into account a timezone.""" - if not (seconds_offset := data.downtime): + if not (seconds_offset := coordinator.data.downtime): + return None + return utcnow() - timedelta(seconds=seconds_offset) + + +def _uptime_to_timestamp( + coordinator: IntellifireDataUpdateCoordinator, +) -> datetime | None: + """Return a timestamp of how long the sensor has been up.""" + if not (seconds_offset := coordinator.data.uptime): return None return utcnow() - timedelta(seconds=seconds_offset) @@ -60,14 +73,14 @@ INTELLIFIRE_SENSORS: tuple[IntellifireSensorEntityDescription, ...] = ( translation_key="flame_height", state_class=SensorStateClass.MEASUREMENT, # UI uses 1-5 for flame height, backing lib uses 0-4 - value_fn=lambda data: (data.flameheight + 1), + value_fn=lambda coordinator: (coordinator.data.flameheight + 1), ), IntellifireSensorEntityDescription( key="temperature", state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.TEMPERATURE, native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_fn=lambda data: data.temperature_c, + value_fn=lambda coordinator: coordinator.data.temperature_c, ), IntellifireSensorEntityDescription( key="target_temp", @@ -75,13 +88,13 @@ INTELLIFIRE_SENSORS: tuple[IntellifireSensorEntityDescription, ...] 
= ( state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.TEMPERATURE, native_unit_of_measurement=UnitOfTemperature.CELSIUS, - value_fn=lambda data: data.thermostat_setpoint_c, + value_fn=lambda coordinator: coordinator.data.thermostat_setpoint_c, ), IntellifireSensorEntityDescription( key="fan_speed", translation_key="fan_speed", state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.fanspeed, + value_fn=lambda coordinator: coordinator.data.fanspeed, ), IntellifireSensorEntityDescription( key="timer_end_timestamp", @@ -102,27 +115,27 @@ INTELLIFIRE_SENSORS: tuple[IntellifireSensorEntityDescription, ...] = ( translation_key="uptime", entity_category=EntityCategory.DIAGNOSTIC, device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: utcnow() - timedelta(seconds=data.uptime), + value_fn=_uptime_to_timestamp, ), IntellifireSensorEntityDescription( key="connection_quality", translation_key="connection_quality", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.connection_quality, + value_fn=lambda coordinator: coordinator.data.connection_quality, entity_registry_enabled_default=False, ), IntellifireSensorEntityDescription( key="ecm_latency", translation_key="ecm_latency", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.ecm_latency, + value_fn=lambda coordinator: coordinator.data.ecm_latency, entity_registry_enabled_default=False, ), IntellifireSensorEntityDescription( key="ipv4_address", translation_key="ipv4_address", entity_category=EntityCategory.DIAGNOSTIC, - value_fn=lambda data: data.ipv4_address, + value_fn=lambda coordinator: coordinator.data.ipv4_address, ), ) @@ -134,17 +147,17 @@ async def async_setup_entry( coordinator: IntellifireDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] async_add_entities( - IntellifireSensor(coordinator=coordinator, description=description) + IntelliFireSensor(coordinator=coordinator, description=description) for description in INTELLIFIRE_SENSORS ) -class IntellifireSensor(IntellifireEntity, SensorEntity): - """Extends IntellifireEntity with Sensor specific logic.""" +class IntelliFireSensor(IntellifireEntity, SensorEntity): + """Extends IntelliFireEntity with Sensor specific logic.""" entity_description: IntellifireSensorEntityDescription @property - def native_value(self) -> int | str | datetime | None: + def native_value(self) -> int | str | datetime | float | None: """Return the state.""" - return self.entity_description.value_fn(self.coordinator.read_api.data) + return self.entity_description.value_fn(self.coordinator) diff --git a/homeassistant/components/intellifire/strings.json b/homeassistant/components/intellifire/strings.json index 6393a4e070d..2eeb2b50b93 100644 --- a/homeassistant/components/intellifire/strings.json +++ b/homeassistant/components/intellifire/strings.json @@ -1,39 +1,30 @@ { "config": { - "flow_title": "{serial} ({host})", + "flow_title": "{serial}", "step": { - "manual_device_entry": { - "description": "Local Configuration", - "data": { - "host": "Host (IP Address)" - } + "pick_cloud_device": { + "title": "Configure fireplace", + "description": "Select fireplace by serial number:" }, - "api_config": { + "cloud_api": { + "description": "Authenticate against IntelliFire Cloud", + "data_description": { + "username": "Your IntelliFire app username", + "password": "Your IntelliFire app password" + }, "data": { "username": "[%key:common::config_flow::data::email%]", "password": "[%key:common::config_flow::data::password%]" } - }, - 
"dhcp_confirm": { - "description": "Do you want to set up {host}\nSerial: {serial}?" - }, - "pick_device": { - "title": "Device Selection", - "description": "The following IntelliFire devices were discovered. Please select which you wish to configure.", - "data": { - "host": "[%key:common::config_flow::data::host%]" - } } }, "error": { - "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "api_error": "Login failed", - "iftapi_connect": "Error conecting to iftapi.net" + "api_error": "Login failed" }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", - "not_intellifire_device": "Not an IntelliFire Device." + "not_intellifire_device": "Not an IntelliFire device.", + "no_available_devices": "All available devices have already been configured." } }, "entity": { diff --git a/homeassistant/components/intellifire/switch.py b/homeassistant/components/intellifire/switch.py index 00de6d74a9c..ac6096497b6 100644 --- a/homeassistant/components/intellifire/switch.py +++ b/homeassistant/components/intellifire/switch.py @@ -6,16 +6,13 @@ from collections.abc import Awaitable, Callable from dataclasses import dataclass from typing import Any -from intellifire4py import IntellifirePollData -from intellifire4py.intellifire import IntellifireAPILocal - from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback +from . import IntellifireDataUpdateCoordinator from .const import DOMAIN -from .coordinator import IntellifireDataUpdateCoordinator from .entity import IntellifireEntity @@ -23,9 +20,9 @@ from .entity import IntellifireEntity class IntellifireSwitchRequiredKeysMixin: """Mixin for required keys.""" - on_fn: Callable[[IntellifireAPILocal], Awaitable] - off_fn: Callable[[IntellifireAPILocal], Awaitable] - value_fn: Callable[[IntellifirePollData], bool] + on_fn: Callable[[IntellifireDataUpdateCoordinator], Awaitable] + off_fn: Callable[[IntellifireDataUpdateCoordinator], Awaitable] + value_fn: Callable[[IntellifireDataUpdateCoordinator], bool] @dataclass(frozen=True) @@ -39,16 +36,16 @@ INTELLIFIRE_SWITCHES: tuple[IntellifireSwitchEntityDescription, ...] 
= ( IntellifireSwitchEntityDescription( key="on_off", translation_key="flame", - on_fn=lambda control_api: control_api.flame_on(), - off_fn=lambda control_api: control_api.flame_off(), - value_fn=lambda data: data.is_on, + on_fn=lambda coordinator: coordinator.control_api.flame_on(), + off_fn=lambda coordinator: coordinator.control_api.flame_off(), + value_fn=lambda coordinator: coordinator.read_api.data.is_on, ), IntellifireSwitchEntityDescription( key="pilot", translation_key="pilot_light", - on_fn=lambda control_api: control_api.pilot_on(), - off_fn=lambda control_api: control_api.pilot_off(), - value_fn=lambda data: data.pilot_on, + on_fn=lambda coordinator: coordinator.control_api.pilot_on(), + off_fn=lambda coordinator: coordinator.control_api.pilot_off(), + value_fn=lambda coordinator: coordinator.read_api.data.pilot_on, ), ) @@ -74,15 +71,15 @@ class IntellifireSwitch(IntellifireEntity, SwitchEntity): async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the switch.""" - await self.entity_description.on_fn(self.coordinator.control_api) + await self.entity_description.on_fn(self.coordinator) await self.async_update_ha_state(force_refresh=True) async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the switch.""" - await self.entity_description.off_fn(self.coordinator.control_api) + await self.entity_description.off_fn(self.coordinator) await self.async_update_ha_state(force_refresh=True) @property def is_on(self) -> bool | None: """Return the on state.""" - return self.entity_description.value_fn(self.coordinator.read_api.data) + return self.entity_description.value_fn(self.coordinator) diff --git a/homeassistant/components/intent_script/__init__.py b/homeassistant/components/intent_script/__init__.py index d6fbb1edd80..6f47cadb04f 100644 --- a/homeassistant/components/intent_script/__init__.py +++ b/homeassistant/components/intent_script/__init__.py @@ -90,7 +90,6 @@ async def async_reload(hass: HomeAssistant, service_call: ServiceCall) -> None: def async_load_intents(hass: HomeAssistant, intents: dict[str, ConfigType]) -> None: """Load YAML intents into the intent system.""" - template.attach(hass, intents) hass.data[DOMAIN] = intents for intent_type, conf in intents.items(): @@ -145,6 +144,12 @@ class _IntentCardData(TypedDict): class ScriptIntentHandler(intent.IntentHandler): """Respond to an intent with a script.""" + slot_schema = { + vol.Any("name", "area", "floor"): cv.string, + vol.Optional("domain"): vol.All(cv.ensure_list, [cv.string]), + vol.Optional("device_class"): vol.All(cv.ensure_list, [cv.string]), + } + def __init__(self, intent_type: str, config: ConfigType) -> None: """Initialize the script intent handler.""" self.intent_type = intent_type @@ -159,8 +164,10 @@ class ScriptIntentHandler(intent.IntentHandler): card: _IntentCardData | None = self.config.get(CONF_CARD) action: script.Script | None = self.config.get(CONF_ACTION) is_async_action: bool = self.config[CONF_ASYNC_ACTION] + hass: HomeAssistant = intent_obj.hass + intent_slots = self.async_validate_slots(intent_obj.slots) slots: dict[str, Any] = { - key: value["value"] for key, value in intent_obj.slots.items() + key: value["value"] for key, value in intent_slots.items() } _LOGGER.debug( @@ -173,6 +180,51 @@ class ScriptIntentHandler(intent.IntentHandler): }, ) + entity_name = slots.get("name") + area_name = slots.get("area") + floor_name = slots.get("floor") + + # Optional domain/device class filters. + # Convert to sets for speed. 
+ domains: set[str] | None = None + device_classes: set[str] | None = None + + if "domain" in slots: + domains = set(slots["domain"]) + + if "device_class" in slots: + device_classes = set(slots["device_class"]) + + match_constraints = intent.MatchTargetsConstraints( + name=entity_name, + area_name=area_name, + floor_name=floor_name, + domains=domains, + device_classes=device_classes, + assistant=intent_obj.assistant, + ) + + if match_constraints.has_constraints: + match_result = intent.async_match_targets(hass, match_constraints) + if match_result.is_match: + targets = {} + + if match_result.states: + targets["entities"] = [ + state.entity_id for state in match_result.states + ] + + if match_result.areas: + targets["areas"] = [area.id for area in match_result.areas] + + if match_result.floors: + targets["floors"] = [ + floor.floor_id for floor in match_result.floors + ] + + if targets: + slots["targets"] = targets + if action is not None: if is_async_action: intent_obj.hass.async_create_task( diff --git a/homeassistant/components/intent_script/icons.json b/homeassistant/components/intent_script/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/intent_script/icons.json +++ b/homeassistant/components/intent_script/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/iotawatt/config_flow.py b/homeassistant/components/iotawatt/config_flow.py index f8821784a1d..187423c7d8b 100644 --- a/homeassistant/components/iotawatt/config_flow.py +++ b/homeassistant/components/iotawatt/config_flow.py @@ -3,11 +3,12 @@ from __future__ import annotations import logging +from typing import Any from iotawattpy.iotawatt import Iotawatt import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -46,11 +47,13 @@ class IOTaWattConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize.""" - self._data = {} + self._data: dict[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" if user_input is None: user_input = {} diff --git a/homeassistant/components/iperf3/icons.json b/homeassistant/components/iperf3/icons.json index 3ef7e301ed6..f6ebe1aee2f 100644 --- a/homeassistant/components/iperf3/icons.json +++ b/homeassistant/components/iperf3/icons.json @@ -1,5 +1,7 @@ { "services": { - "speedtest": "mdi:speedometer" + "speedtest": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/iqvia/sensor.py b/homeassistant/components/iqvia/sensor.py index ba3c288b702..af351e0d543 100644 --- a/homeassistant/components/iqvia/sensor.py +++ b/homeassistant/components/iqvia/sensor.py @@ -244,8 +244,8 @@ class IndexSensor(IQVIAEntity, SensorEntity): key = self.entity_description.key.split("_")[-1].title() try: - [period] = [p for p in data["periods"] if p["Type"] == key] # type: ignore[index] - except TypeError: + period = next(p for p in data["periods"] if p["Type"] == key) # type: ignore[index] + except StopIteration: return data = cast(dict[str, Any], data) diff --git 
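
A small note on the iqvia hunk above: the single-element list unpacking guarded by TypeError is replaced with next() over a generator expression, guarded by StopIteration. The same lookup can also be written with next()'s default argument; this fragment is shown only to illustrate the idiom (data and key are the local variables from that hunk, and this is not the integration's actual code):

# Equivalent idiom using next() with a default instead of try/except.
period = next((p for p in data["periods"] if p["Type"] == key), None)
if period is None:
    return
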
a/homeassistant/components/islamic_prayer_times/__init__.py b/homeassistant/components/islamic_prayer_times/__init__.py index 089afc88564..d61eba343ac 100644 --- a/homeassistant/components/islamic_prayer_times/__init__.py +++ b/homeassistant/components/islamic_prayer_times/__init__.py @@ -7,14 +7,12 @@ import logging from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE, Platform from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .const import DOMAIN from .coordinator import IslamicPrayerDataUpdateCoordinator PLATFORMS = [Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/ista_ecotrend/manifest.json b/homeassistant/components/ista_ecotrend/manifest.json index 23d60a0a5bb..baa5fbde9c0 100644 --- a/homeassistant/components/ista_ecotrend/manifest.json +++ b/homeassistant/components/ista_ecotrend/manifest.json @@ -1,6 +1,7 @@ { "domain": "ista_ecotrend", "name": "ista EcoTrend", + "after_dependencies": ["recorder"], "codeowners": ["@tr4nt0r"], "config_flow": true, "documentation": "https://www.home-assistant.io/integrations/ista_ecotrend", diff --git a/homeassistant/components/ista_ecotrend/sensor.py b/homeassistant/components/ista_ecotrend/sensor.py index 3ae2128e142..7aa1adfe4c9 100644 --- a/homeassistant/components/ista_ecotrend/sensor.py +++ b/homeassistant/components/ista_ecotrend/sensor.py @@ -2,10 +2,21 @@ from __future__ import annotations +import asyncio from dataclasses import dataclass +import datetime from enum import StrEnum import logging +from homeassistant.components.recorder.models.statistics import ( + StatisticData, + StatisticMetaData, +) +from homeassistant.components.recorder.statistics import ( + async_add_external_statistics, + get_instance, + get_last_statistics, +) from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, @@ -14,7 +25,11 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import UnitOfEnergy, UnitOfVolume from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.device_registry import ( + DeviceEntry, + DeviceEntryType, + DeviceInfo, +) from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -22,7 +37,7 @@ from homeassistant.helpers.update_coordinator import CoordinatorEntity from . 
import IstaConfigEntry from .const import DOMAIN from .coordinator import IstaCoordinator -from .util import IstaConsumptionType, IstaValueType, get_native_value +from .util import IstaConsumptionType, IstaValueType, get_native_value, get_statistics _LOGGER = logging.getLogger(__name__) @@ -155,6 +170,7 @@ class IstaSensor(CoordinatorEntity[IstaCoordinator], SensorEntity): entity_description: IstaSensorEntityDescription _attr_has_entity_name = True + device_entry: DeviceEntry def __init__( self, @@ -186,3 +202,81 @@ class IstaSensor(CoordinatorEntity[IstaCoordinator], SensorEntity): consumption_type=self.entity_description.consumption_type, value_type=self.entity_description.value_type, ) + + async def async_added_to_hass(self) -> None: + """When added to hass.""" + # perform initial statistics import when sensor is added, otherwise it would take + # 1 day when _handle_coordinator_update is triggered for the first time. + await self.update_statistics() + await super().async_added_to_hass() + + def _handle_coordinator_update(self) -> None: + """Handle coordinator update.""" + asyncio.run_coroutine_threadsafe(self.update_statistics(), self.hass.loop) + + async def update_statistics(self) -> None: + """Import ista EcoTrend historical statistics.""" + + # Remember the statistic_id that was initially created + # in case the entity gets renamed, because we cannot + # change the statistic_id + name = self.coordinator.config_entry.options.get( + f"lts_{self.entity_description.key}_{self.consumption_unit}" + ) + if not name: + name = self.entity_id.removeprefix("sensor.") + self.hass.config_entries.async_update_entry( + entry=self.coordinator.config_entry, + options={ + **self.coordinator.config_entry.options, + f"lts_{self.entity_description.key}_{self.consumption_unit}": name, + }, + ) + + statistic_id = f"{DOMAIN}:{name}" + statistics_sum = 0.0 + statistics_since = None + + last_stats = await get_instance(self.hass).async_add_executor_job( + get_last_statistics, + self.hass, + 1, + statistic_id, + False, + {"sum"}, + ) + + _LOGGER.debug("Last statistics: %s", last_stats) + + if last_stats: + statistics_sum = last_stats[statistic_id][0].get("sum") or 0.0 + statistics_since = datetime.datetime.fromtimestamp( + last_stats[statistic_id][0].get("end") or 0, tz=datetime.UTC + ) + datetime.timedelta(days=1) + + if monthly_consumptions := get_statistics( + self.coordinator.data[self.consumption_unit], + self.entity_description.consumption_type, + self.entity_description.value_type, + ): + statistics: list[StatisticData] = [ + { + "start": consumptions["date"], + "state": consumptions["value"], + "sum": (statistics_sum := statistics_sum + consumptions["value"]), + } + for consumptions in monthly_consumptions + if statistics_since is None or consumptions["date"] > statistics_since + ] + + metadata: StatisticMetaData = { + "has_mean": False, + "has_sum": True, + "name": f"{self.device_entry.name} {self.name}", + "source": DOMAIN, + "statistic_id": statistic_id, + "unit_of_measurement": self.entity_description.native_unit_of_measurement, + } + if statistics: + _LOGGER.debug("Insert statistics: %s %s", metadata, statistics) + async_add_external_statistics(self.hass, metadata, statistics) diff --git a/homeassistant/components/isy994/const.py b/homeassistant/components/isy994/const.py index 85ecafd6490..57b30c88075 100644 --- a/homeassistant/components/isy994/const.py +++ b/homeassistant/components/isy994/const.py @@ -23,7 +23,6 @@ from homeassistant.const import ( DEGREE, LIGHT_LUX, PERCENTAGE, - 
POWER_VOLT_AMPERE_REACTIVE, REVOLUTIONS_PER_MINUTE, SERVICE_LOCK, SERVICE_UNLOCK, @@ -50,6 +49,7 @@ from homeassistant.const import ( UnitOfMass, UnitOfPower, UnitOfPressure, + UnitOfReactivePower, UnitOfSoundPressure, UnitOfSpeed, UnitOfTemperature, @@ -438,7 +438,7 @@ UOM_FRIENDLY_NAME = { "133": UnitOfFrequency.KILOHERTZ, "134": f"{UnitOfLength.METERS}/{UnitOfTime.SECONDS}²", "135": UnitOfApparentPower.VOLT_AMPERE, # Volt-Amp - "136": POWER_VOLT_AMPERE_REACTIVE, # VAR = Volt-Amp Reactive + "136": UnitOfReactivePower.VOLT_AMPERE_REACTIVE, # VAR = Volt-Amp Reactive "137": "", # NTP DateTime - Number of seconds since 1900 "138": UnitOfPressure.PSI, "139": DEGREE, # Degree 0-360 diff --git a/homeassistant/components/isy994/icons.json b/homeassistant/components/isy994/icons.json index 27b2ea6954e..9c6e7fa78df 100644 --- a/homeassistant/components/isy994/icons.json +++ b/homeassistant/components/isy994/icons.json @@ -1,12 +1,28 @@ { "services": { - "send_raw_node_command": "mdi:console-line", - "send_node_command": "mdi:console", - "get_zwave_parameter": "mdi:download", - "set_zwave_parameter": "mdi:upload", - "set_zwave_lock_user_code": "mdi:upload-lock", - "delete_zwave_lock_user_code": "mdi:lock-remove", - "rename_node": "mdi:pencil", - "send_program_command": "mdi:console" + "send_raw_node_command": { + "service": "mdi:console-line" + }, + "send_node_command": { + "service": "mdi:console" + }, + "get_zwave_parameter": { + "service": "mdi:download" + }, + "set_zwave_parameter": { + "service": "mdi:upload" + }, + "set_zwave_lock_user_code": { + "service": "mdi:upload-lock" + }, + "delete_zwave_lock_user_code": { + "service": "mdi:lock-remove" + }, + "rename_node": { + "service": "mdi:pencil" + }, + "send_program_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/izone/icons.json b/homeassistant/components/izone/icons.json index e02cd57c141..bb38db27839 100644 --- a/homeassistant/components/izone/icons.json +++ b/homeassistant/components/izone/icons.json @@ -1,6 +1,10 @@ { "services": { - "airflow_min": "mdi:fan-minus", - "airflow_max": "mdi:fan-plus" + "airflow_min": { + "service": "mdi:fan-minus" + }, + "airflow_max": { + "service": "mdi:fan-plus" + } } } diff --git a/homeassistant/components/jewish_calendar/config_flow.py b/homeassistant/components/jewish_calendar/config_flow.py index 8f04d73915f..518db38b3bb 100644 --- a/homeassistant/components/jewish_calendar/config_flow.py +++ b/homeassistant/components/jewish_calendar/config_flow.py @@ -30,7 +30,6 @@ from homeassistant.helpers.selector import ( SelectSelector, SelectSelectorConfig, ) -from homeassistant.helpers.typing import ConfigType from .const import ( CONF_CANDLE_LIGHT_MINUTES, @@ -125,11 +124,9 @@ class JewishCalendarConfigFlow(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_import( - self, import_config: ConfigType | None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) class JewishCalendarOptionsFlowHandler(OptionsFlowWithConfigEntry): diff --git a/homeassistant/components/juicenet/config_flow.py b/homeassistant/components/juicenet/config_flow.py index 607ffb6ffe2..8bcee5677e6 100644 --- a/homeassistant/components/juicenet/config_flow.py +++ b/homeassistant/components/juicenet/config_flow.py @@ -1,13 +1,14 @@ """Config flow for JuiceNet integration.""" import logging +from 
typing import Any import aiohttp from pyjuicenet import Api, TokenError import voluptuous as vol from homeassistant import core, exceptions -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -44,7 +45,9 @@ class JuiceNetConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: @@ -66,9 +69,9 @@ class JuiceNetConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) class CannotConnect(exceptions.HomeAssistantError): diff --git a/homeassistant/components/justnimbus/config_flow.py b/homeassistant/components/justnimbus/config_flow.py index 0520c558266..8c816c1ac1b 100644 --- a/homeassistant/components/justnimbus/config_flow.py +++ b/homeassistant/components/justnimbus/config_flow.py @@ -77,7 +77,7 @@ class JustNimbusConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" self.reauth_entry = self.hass.config_entries.async_get_entry( diff --git a/homeassistant/components/jvc_projector/__init__.py b/homeassistant/components/jvc_projector/__init__.py index 8ce1fb46e3d..09e93127e40 100644 --- a/homeassistant/components/jvc_projector/__init__.py +++ b/homeassistant/components/jvc_projector/__init__.py @@ -15,13 +15,14 @@ from homeassistant.const import ( from homeassistant.core import Event, HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from .const import DOMAIN from .coordinator import JvcProjectorDataUpdateCoordinator +type JVCConfigEntry = ConfigEntry[JvcProjectorDataUpdateCoordinator] + PLATFORMS = [Platform.BINARY_SENSOR, Platform.REMOTE, Platform.SELECT, Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: JVCConfigEntry) -> bool: """Set up integration from a config entry.""" device = JvcProjector( host=entry.data[CONF_HOST], @@ -43,7 +44,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: coordinator = JvcProjectorDataUpdateCoordinator(hass, device) await coordinator.async_config_entry_first_refresh() - hass.data.setdefault(DOMAIN, {})[entry.entry_id] = coordinator + entry.runtime_data = coordinator async def disconnect(event: Event) -> None: await device.disconnect() @@ -57,9 +58,8 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: JVCConfigEntry) -> bool: """Unload config entry.""" if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): - await hass.data[DOMAIN][entry.entry_id].device.disconnect() - hass.data[DOMAIN].pop(entry.entry_id) + await 
entry.runtime_data.device.disconnect() return unload_ok diff --git a/homeassistant/components/jvc_projector/binary_sensor.py b/homeassistant/components/jvc_projector/binary_sensor.py index 7e8788aa0a6..6dfac63892b 100644 --- a/homeassistant/components/jvc_projector/binary_sensor.py +++ b/homeassistant/components/jvc_projector/binary_sensor.py @@ -5,22 +5,20 @@ from __future__ import annotations from jvcprojector import const from homeassistant.components.binary_sensor import BinarySensorEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import JvcProjectorDataUpdateCoordinator -from .const import DOMAIN +from . import JVCConfigEntry, JvcProjectorDataUpdateCoordinator from .entity import JvcProjectorEntity ON_STATUS = (const.ON, const.WARMING) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: JVCConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the JVC Projector platform from a config entry.""" - coordinator: JvcProjectorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities([JvcBinarySensor(coordinator)]) diff --git a/homeassistant/components/jvc_projector/config_flow.py b/homeassistant/components/jvc_projector/config_flow.py index 7564d571d3b..253aa640f71 100644 --- a/homeassistant/components/jvc_projector/config_flow.py +++ b/homeassistant/components/jvc_projector/config_flow.py @@ -37,7 +37,7 @@ class JvcProjectorConfigFlow(ConfigFlow, domain=DOMAIN): try: if not is_host_valid(host): - raise InvalidHost + raise InvalidHost # noqa: TRY301 mac = await get_mac_address(host, port, password) except InvalidHost: @@ -74,7 +74,7 @@ class JvcProjectorConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth on password authentication error.""" self._reauth_entry = self.hass.config_entries.async_get_entry( diff --git a/homeassistant/components/jvc_projector/remote.py b/homeassistant/components/jvc_projector/remote.py index b69d3b0118b..f90a2816363 100644 --- a/homeassistant/components/jvc_projector/remote.py +++ b/homeassistant/components/jvc_projector/remote.py @@ -10,12 +10,11 @@ from typing import Any from jvcprojector import const from homeassistant.components.remote import RemoteEntity -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity_platform import AddEntitiesCallback -from .const import DOMAIN +from . 
import JVCConfigEntry from .entity import JvcProjectorEntity COMMANDS = { @@ -55,10 +54,10 @@ _LOGGER = logging.getLogger(__name__) async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: JVCConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the JVC Projector platform from a config entry.""" - coordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities([JvcProjectorRemote(coordinator)], True) diff --git a/homeassistant/components/jvc_projector/select.py b/homeassistant/components/jvc_projector/select.py index 1395637fad1..60c80f98fc0 100644 --- a/homeassistant/components/jvc_projector/select.py +++ b/homeassistant/components/jvc_projector/select.py @@ -9,12 +9,10 @@ from typing import Final from jvcprojector import JvcProjector, const from homeassistant.components.select import SelectEntity, SelectEntityDescription -from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import JvcProjectorDataUpdateCoordinator -from .const import DOMAIN +from . import JVCConfigEntry, JvcProjectorDataUpdateCoordinator from .entity import JvcProjectorEntity @@ -41,11 +39,11 @@ SELECTS: Final[list[JvcProjectorSelectDescription]] = [ async def async_setup_entry( hass: HomeAssistant, - entry: ConfigEntry, + entry: JVCConfigEntry, async_add_entities: AddEntitiesCallback, ) -> None: """Set up the JVC Projector platform from a config entry.""" - coordinator: JvcProjectorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( JvcProjectorSelectEntity(coordinator, description) for description in SELECTS diff --git a/homeassistant/components/jvc_projector/sensor.py b/homeassistant/components/jvc_projector/sensor.py index 9be04b367e6..3edf51e4316 100644 --- a/homeassistant/components/jvc_projector/sensor.py +++ b/homeassistant/components/jvc_projector/sensor.py @@ -9,13 +9,11 @@ from homeassistant.components.sensor import ( SensorEntity, SensorEntityDescription, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import JvcProjectorDataUpdateCoordinator -from .const import DOMAIN +from . 
import JVCConfigEntry, JvcProjectorDataUpdateCoordinator from .entity import JvcProjectorEntity JVC_SENSORS = ( @@ -36,10 +34,10 @@ JVC_SENSORS = ( async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback + hass: HomeAssistant, entry: JVCConfigEntry, async_add_entities: AddEntitiesCallback ) -> None: """Set up the JVC Projector platform from a config entry.""" - coordinator: JvcProjectorDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + coordinator = entry.runtime_data async_add_entities( JvcSensor(coordinator, description) for description in JVC_SENSORS diff --git a/homeassistant/components/kaleidescape/config_flow.py b/homeassistant/components/kaleidescape/config_flow.py index bb9f47ec1e8..e4a562dc00b 100644 --- a/homeassistant/components/kaleidescape/config_flow.py +++ b/homeassistant/components/kaleidescape/config_flow.py @@ -38,7 +38,7 @@ class KaleidescapeConfigFlow(ConfigFlow, domain=DOMAIN): try: info = await validate_host(host) if info.server_only: - raise UnsupportedError + raise UnsupportedError # noqa: TRY301 except ConnectionError: errors["base"] = ERROR_CANNOT_CONNECT except UnsupportedError: @@ -73,7 +73,7 @@ class KaleidescapeConfigFlow(ConfigFlow, domain=DOMAIN): try: self.discovered_device = await validate_host(host) if self.discovered_device.server_only: - raise UnsupportedError + raise UnsupportedError # noqa: TRY301 except ConnectionError: return self.async_abort(reason=ERROR_CANNOT_CONNECT) except UnsupportedError: diff --git a/homeassistant/components/keba/icons.json b/homeassistant/components/keba/icons.json index 7f64bf7fb34..6de43a84cf6 100644 --- a/homeassistant/components/keba/icons.json +++ b/homeassistant/components/keba/icons.json @@ -1,12 +1,28 @@ { "services": { - "request_data": "mdi:database-arrow-down", - "authorize": "mdi:lock", - "deauthorize": "mdi:lock-open", - "set_energy": "mdi:flash", - "set_current": "mdi:flash", - "enable": "mdi:flash", - "disable": "mdi:fash-off", - "set_failsafe": "mdi:message-alert" + "request_data": { + "service": "mdi:database-arrow-down" + }, + "authorize": { + "service": "mdi:lock" + }, + "deauthorize": { + "service": "mdi:lock-open" + }, + "set_energy": { + "service": "mdi:flash" + }, + "set_current": { + "service": "mdi:flash" + }, + "enable": { + "service": "mdi:flash" + }, + "disable": { + "service": "mdi:fash-off" + }, + "set_failsafe": { + "service": "mdi:message-alert" + } } } diff --git a/homeassistant/components/keba/sensor.py b/homeassistant/components/keba/sensor.py index 74c08933cbe..1878a7f6e49 100644 --- a/homeassistant/components/keba/sensor.py +++ b/homeassistant/components/keba/sensor.py @@ -64,7 +64,7 @@ async def async_setup_platform( keba, "session_energy", SensorEntityDescription( - key="E pres", + key="E pres", # codespell:ignore pres name="Session Energy", native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, device_class=SensorDeviceClass.ENERGY, diff --git a/homeassistant/components/kef/icons.json b/homeassistant/components/kef/icons.json index eeb6dd099ce..e259e91eb1b 100644 --- a/homeassistant/components/kef/icons.json +++ b/homeassistant/components/kef/icons.json @@ -1,12 +1,28 @@ { "services": { - "update_dsp": "mdi:update", - "set_mode": "mdi:cog", - "set_desk_db": "mdi:volume-high", - "set_wall_db": "mdi:volume-high", - "set_treble_db": "mdi:volume-high", - "set_high_hz": "mdi:sine-wave", - "set_low_hz": "mdi:cosine-wave", - "set_sub_db": "mdi:volume-high" + "update_dsp": { + "service": "mdi:update" + }, + "set_mode": { + 
"service": "mdi:cog" + }, + "set_desk_db": { + "service": "mdi:volume-high" + }, + "set_wall_db": { + "service": "mdi:volume-high" + }, + "set_treble_db": { + "service": "mdi:volume-high" + }, + "set_high_hz": { + "service": "mdi:sine-wave" + }, + "set_low_hz": { + "service": "mdi:cosine-wave" + }, + "set_sub_db": { + "service": "mdi:volume-high" + } } } diff --git a/homeassistant/components/kef/media_player.py b/homeassistant/components/kef/media_player.py index ad335499ba4..1c5188b1a6f 100644 --- a/homeassistant/components/kef/media_player.py +++ b/homeassistant/components/kef/media_player.py @@ -161,7 +161,7 @@ async def async_setup_platform( }, "set_mode", ) - platform.async_register_entity_service(SERVICE_UPDATE_DSP, {}, "update_dsp") + platform.async_register_entity_service(SERVICE_UPDATE_DSP, None, "update_dsp") def add_service(name, which, option): options = DSP_OPTION_MAPPING[which] diff --git a/homeassistant/components/keyboard/icons.json b/homeassistant/components/keyboard/icons.json index 8186b2684dd..03b6210bf41 100644 --- a/homeassistant/components/keyboard/icons.json +++ b/homeassistant/components/keyboard/icons.json @@ -1,10 +1,22 @@ { "services": { - "volume_up": "mdi:volume-high", - "volume_down": "mdi:volume-low", - "volume_mute": "mdi:volume-off", - "media_play_pause": "mdi:play-pause", - "media_next_track": "mdi:skip-next", - "media_prev_track": "mdi:skip-previous" + "volume_up": { + "service": "mdi:volume-high" + }, + "volume_down": { + "service": "mdi:volume-low" + }, + "volume_mute": { + "service": "mdi:volume-off" + }, + "media_play_pause": { + "service": "mdi:play-pause" + }, + "media_next_track": { + "service": "mdi:skip-next" + }, + "media_prev_track": { + "service": "mdi:skip-previous" + } } } diff --git a/homeassistant/components/keymitt_ble/config_flow.py b/homeassistant/components/keymitt_ble/config_flow.py index 589798a281a..217ce3cc923 100644 --- a/homeassistant/components/keymitt_ble/config_flow.py +++ b/homeassistant/components/keymitt_ble/config_flow.py @@ -42,9 +42,9 @@ class MicroBotConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize.""" - self._errors = {} + self._errors: dict[str, str] = {} self._discovered_adv: MicroBotAdvertisement | None = None self._discovered_advs: dict[str, MicroBotAdvertisement] = {} self._client: MicroBotApiClient | None = None diff --git a/homeassistant/components/keymitt_ble/icons.json b/homeassistant/components/keymitt_ble/icons.json index 77450fbf026..d265d96b395 100644 --- a/homeassistant/components/keymitt_ble/icons.json +++ b/homeassistant/components/keymitt_ble/icons.json @@ -1,5 +1,7 @@ { "services": { - "calibrate": "mdi:wrench" + "calibrate": { + "service": "mdi:wrench" + } } } diff --git a/homeassistant/components/kitchen_sink/__init__.py b/homeassistant/components/kitchen_sink/__init__.py index 94dfca77410..2c3887bb383 100644 --- a/homeassistant/components/kitchen_sink/__init__.py +++ b/homeassistant/components/kitchen_sink/__init__.py @@ -9,6 +9,8 @@ from __future__ import annotations import datetime from random import random +import voluptuous as vol + from homeassistant.components.recorder import DOMAIN as RECORDER_DOMAIN, get_instance from homeassistant.components.recorder.models import StatisticData, StatisticMetaData from homeassistant.components.recorder.statistics import ( @@ -18,7 +20,7 @@ from homeassistant.components.recorder.statistics import ( ) from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry from 
homeassistant.const import Platform, UnitOfEnergy, UnitOfTemperature, UnitOfVolume -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType @@ -40,6 +42,15 @@ COMPONENTS_WITH_DEMO_PLATFORM = [ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +SCHEMA_SERVICE_TEST_SERVICE_1 = vol.Schema( + { + vol.Required("field_1"): vol.Coerce(int), + vol.Required("field_2"): vol.In(["off", "auto", "cool"]), + vol.Optional("field_3"): vol.Coerce(int), + vol.Optional("field_4"): vol.In(["forwards", "reverse"]), + } +) + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the demo environment.""" @@ -48,6 +59,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: DOMAIN, context={"source": SOURCE_IMPORT}, data={} ) ) + + @callback + def service_handler(call: ServiceCall | None = None) -> None: + """Do nothing.""" + + hass.services.async_register( + DOMAIN, "test_service_1", service_handler, SCHEMA_SERVICE_TEST_SERVICE_1 + ) + return True diff --git a/homeassistant/components/kitchen_sink/config_flow.py b/homeassistant/components/kitchen_sink/config_flow.py index c561ca29b8a..9a0b78c80e6 100644 --- a/homeassistant/components/kitchen_sink/config_flow.py +++ b/homeassistant/components/kitchen_sink/config_flow.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Mapping from typing import Any import voluptuous as vol @@ -34,14 +35,16 @@ class KitchenSinkConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Set the config entry up from yaml.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") - return self.async_create_entry(title="Kitchen Sink", data=import_info) + return self.async_create_entry(title="Kitchen Sink", data=import_data) - async def async_step_reauth(self, data): + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Reauth step.""" return await self.async_step_reauth_confirm() diff --git a/homeassistant/components/kitchen_sink/icons.json b/homeassistant/components/kitchen_sink/icons.json index 85472996819..565d595d9c7 100644 --- a/homeassistant/components/kitchen_sink/icons.json +++ b/homeassistant/components/kitchen_sink/icons.json @@ -2,10 +2,18 @@ "options": { "step": { "options_1": { - "section": { + "sections": { "section_1": "mdi:robot" } } } + }, + "services": { + "test_service_1": { + "service": "mdi:flask", + "sections": { + "advanced_fields": "mdi:test-tube" + } + } } } diff --git a/homeassistant/components/kitchen_sink/lawn_mower.py b/homeassistant/components/kitchen_sink/lawn_mower.py index 50ec70f6759..51814fb262d 100644 --- a/homeassistant/components/kitchen_sink/lawn_mower.py +++ b/homeassistant/components/kitchen_sink/lawn_mower.py @@ -30,18 +30,26 @@ async def async_setup_platform( ), DemoLawnMower( "kitchen_sink_mower_002", + "Mower can return", + LawnMowerActivity.RETURNING, + LawnMowerEntityFeature.DOCK + | LawnMowerEntityFeature.PAUSE + | LawnMowerEntityFeature.START_MOWING, + ), + DemoLawnMower( + "kitchen_sink_mower_003", 
"Mower can dock", LawnMowerActivity.MOWING, LawnMowerEntityFeature.DOCK | LawnMowerEntityFeature.START_MOWING, ), DemoLawnMower( - "kitchen_sink_mower_003", + "kitchen_sink_mower_004", "Mower can pause", LawnMowerActivity.DOCKED, LawnMowerEntityFeature.PAUSE | LawnMowerEntityFeature.START_MOWING, ), DemoLawnMower( - "kitchen_sink_mower_004", + "kitchen_sink_mower_005", "Mower can do all", LawnMowerActivity.DOCKED, LawnMowerEntityFeature.DOCK @@ -49,7 +57,7 @@ async def async_setup_platform( | LawnMowerEntityFeature.START_MOWING, ), DemoLawnMower( - "kitchen_sink_mower_005", + "kitchen_sink_mower_006", "Mower is paused", LawnMowerActivity.PAUSED, LawnMowerEntityFeature.DOCK diff --git a/homeassistant/components/kitchen_sink/services.yaml b/homeassistant/components/kitchen_sink/services.yaml new file mode 100644 index 00000000000..c65495095dc --- /dev/null +++ b/homeassistant/components/kitchen_sink/services.yaml @@ -0,0 +1,32 @@ +test_service_1: + fields: + field_1: + required: true + selector: + number: + min: 0 + max: 60 + unit_of_measurement: seconds + field_2: + required: true + selector: + select: + options: + - "off" + - "auto" + - "cool" + advanced_fields: + collapsed: true + fields: + field_3: + selector: + number: + min: 0 + max: 24 + unit_of_measurement: hours + field_4: + selector: + select: + options: + - "forward" + - "reverse" diff --git a/homeassistant/components/kitchen_sink/strings.json b/homeassistant/components/kitchen_sink/strings.json index e67527d8468..b10534eac00 100644 --- a/homeassistant/components/kitchen_sink/strings.json +++ b/homeassistant/components/kitchen_sink/strings.json @@ -12,7 +12,7 @@ "data": {} }, "options_1": { - "section": { + "sections": { "section_1": { "data": { "bool": "Optional boolean", @@ -71,5 +71,35 @@ "title": "This is not a fixable problem", "description": "This issue is never going to give up." 
} + }, + "services": { + "test_service_1": { + "name": "Test service 1", + "description": "Fake service for testing", + "fields": { + "field_1": { + "name": "Field 1", + "description": "Number of seconds" + }, + "field_2": { + "name": "Field 2", + "description": "Mode" + }, + "field_3": { + "name": "Field 3", + "description": "Number of hours" + }, + "field_4": { + "name": "Field 4", + "description": "Direction" + } + }, + "sections": { + "advanced_fields": { + "name": "Advanced options", + "description": "Some very advanced things" + } + } + } } } diff --git a/homeassistant/components/kmtronic/config_flow.py b/homeassistant/components/kmtronic/config_flow.py index 746b075789f..f83d102ac05 100644 --- a/homeassistant/components/kmtronic/config_flow.py +++ b/homeassistant/components/kmtronic/config_flow.py @@ -3,13 +3,19 @@ from __future__ import annotations import logging +from typing import Any import aiohttp from pykmtronic.auth import Auth from pykmtronic.hub import KMTronicHubAPI import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -62,7 +68,9 @@ class KmtronicConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return KMTronicOptionsFlow(config_entry) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/knocki/strings.json b/homeassistant/components/knocki/strings.json index b7a7daad1fc..8f5d0161166 100644 --- a/homeassistant/components/knocki/strings.json +++ b/homeassistant/components/knocki/strings.json @@ -11,6 +11,9 @@ "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_service%]" } }, "entity": { diff --git a/homeassistant/components/knx/__init__.py b/homeassistant/components/knx/__init__.py index a401ee2ccac..01d5294639c 100644 --- a/homeassistant/components/knx/__init__.py +++ b/homeassistant/components/knx/__init__.py @@ -297,6 +297,7 @@ class KNXModule: self.config_store = KNXConfigStore(hass=hass, config_entry=entry) self.xknx = XKNX( + address_format=self.project.get_address_format(), connection_config=self.connection_config(), rate_limit=self.entry.data[CONF_KNX_RATE_LIMIT], state_updater=self.entry.data[CONF_KNX_STATE_UPDATER], diff --git a/homeassistant/components/knx/climate.py b/homeassistant/components/knx/climate.py index abce143c760..4932df55087 100644 --- a/homeassistant/components/knx/climate.py +++ b/homeassistant/components/knx/climate.py @@ -10,11 +10,10 @@ from xknx.devices import ( ClimateMode as XknxClimateMode, Device as XknxDevice, ) -from xknx.dpt.dpt_20 import HVACControllerMode +from xknx.dpt.dpt_20 import HVACControllerMode, HVACOperationMode from homeassistant import config_entries from homeassistant.components.climate import ( - PRESET_AWAY, ClimateEntity, ClimateEntityFeature, HVACAction, @@ -32,19 +31,12 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from 
homeassistant.helpers.typing import ConfigType from . import KNXModule -from .const import ( - CONTROLLER_MODES, - CURRENT_HVAC_ACTIONS, - DATA_KNX_CONFIG, - DOMAIN, - PRESET_MODES, -) +from .const import CONTROLLER_MODES, CURRENT_HVAC_ACTIONS, DATA_KNX_CONFIG, DOMAIN from .knx_entity import KnxYamlEntity from .schema import ClimateSchema ATTR_COMMAND_VALUE = "command_value" CONTROLLER_MODES_INV = {value: key for key, value in CONTROLLER_MODES.items()} -PRESET_MODES_INV = {value: key for key, value in PRESET_MODES.items()} async def async_setup_entry( @@ -142,6 +134,7 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): _device: XknxClimate _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = "knx_climate" _enable_turn_on_off_backwards_compatibility = False def __init__(self, knx_module: KNXModule, config: ConfigType) -> None: @@ -165,8 +158,14 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): ClimateEntityFeature.TURN_OFF | ClimateEntityFeature.TURN_ON ) - if self.preset_modes: + if ( + self._device.mode is not None + and self._device.mode.operation_modes # empty list when not writable + ): self._attr_supported_features |= ClimateEntityFeature.PRESET_MODE + self._attr_preset_modes = [ + mode.name.lower() for mode in self._device.mode.operation_modes + ] self._attr_target_temperature_step = self._device.temperature_step self._attr_unique_id = ( f"{self._device.temperature.group_address_state}_" @@ -309,32 +308,18 @@ class KNXClimate(KnxYamlEntity, ClimateEntity): Requires ClimateEntityFeature.PRESET_MODE. """ if self._device.mode is not None and self._device.mode.supports_operation_mode: - return PRESET_MODES.get(self._device.mode.operation_mode, PRESET_AWAY) + return self._device.mode.operation_mode.name.lower() return None - @property - def preset_modes(self) -> list[str] | None: - """Return a list of available preset modes. - - Requires ClimateEntityFeature.PRESET_MODE. 
- """ - if self._device.mode is None: - return None - - presets = [ - PRESET_MODES.get(operation_mode) - for operation_mode in self._device.mode.operation_modes - ] - return list(filter(None, presets)) - async def async_set_preset_mode(self, preset_mode: str) -> None: """Set new preset mode.""" if ( self._device.mode is not None - and self._device.mode.supports_operation_mode - and (knx_operation_mode := PRESET_MODES_INV.get(preset_mode)) is not None + and self._device.mode.operation_modes # empty list when not writable ): - await self._device.mode.set_operation_mode(knx_operation_mode) + await self._device.mode.set_operation_mode( + HVACOperationMode[preset_mode.upper()] + ) self.async_write_ha_state() @property diff --git a/homeassistant/components/knx/config_flow.py b/homeassistant/components/knx/config_flow.py index 2fc1f49800c..7e4db1f889b 100644 --- a/homeassistant/components/knx/config_flow.py +++ b/homeassistant/components/knx/config_flow.py @@ -445,7 +445,7 @@ class KNXCommonFlow(ABC, ConfigEntryBaseFlow): try: key_bytes = bytes.fromhex(user_input[CONF_KNX_ROUTING_BACKBONE_KEY]) if len(key_bytes) != 16: - raise ValueError + raise ValueError # noqa: TRY301 except ValueError: errors[CONF_KNX_ROUTING_BACKBONE_KEY] = "invalid_backbone_key" if not errors: diff --git a/homeassistant/components/knx/const.py b/homeassistant/components/knx/const.py index 6400f0fe466..9ceb18385cb 100644 --- a/homeassistant/components/knx/const.py +++ b/homeassistant/components/knx/const.py @@ -6,18 +6,10 @@ from collections.abc import Awaitable, Callable from enum import Enum from typing import Final, TypedDict -from xknx.dpt.dpt_20 import HVACControllerMode, HVACOperationMode +from xknx.dpt.dpt_20 import HVACControllerMode from xknx.telegram import Telegram -from homeassistant.components.climate import ( - PRESET_AWAY, - PRESET_COMFORT, - PRESET_ECO, - PRESET_NONE, - PRESET_SLEEP, - HVACAction, - HVACMode, -) +from homeassistant.components.climate import HVACAction, HVACMode from homeassistant.const import Platform DOMAIN: Final = "knx" @@ -174,12 +166,3 @@ CURRENT_HVAC_ACTIONS: Final = { HVACMode.FAN_ONLY: HVACAction.FAN, HVACMode.DRY: HVACAction.DRYING, } - -PRESET_MODES: Final = { - # Map DPT 20.102 HVAC operating modes to HA presets - HVACOperationMode.AUTO: PRESET_NONE, - HVACOperationMode.BUILDING_PROTECTION: PRESET_ECO, - HVACOperationMode.ECONOMY: PRESET_SLEEP, - HVACOperationMode.STANDBY: PRESET_AWAY, - HVACOperationMode.COMFORT: PRESET_COMFORT, -} diff --git a/homeassistant/components/knx/expose.py b/homeassistant/components/knx/expose.py index 29d0be998b6..82bee48ba69 100644 --- a/homeassistant/components/knx/expose.py +++ b/homeassistant/components/knx/expose.py @@ -84,8 +84,6 @@ class KNXExposeSensor: self.expose_default = config.get(ExposeSchema.CONF_KNX_EXPOSE_DEFAULT) self.expose_type: int | str = config[ExposeSchema.CONF_KNX_EXPOSE_TYPE] self.value_template: Template | None = config.get(CONF_VALUE_TEMPLATE) - if self.value_template is not None: - self.value_template.hass = hass self._remove_listener: Callable[[], None] | None = None self.device: ExposeSensor = ExposeSensor( @@ -127,6 +125,8 @@ class KNXExposeSensor: def _get_expose_value(self, state: State | None) -> bool | int | float | str | None: """Extract value from state.""" if state is None or state.state in (STATE_UNKNOWN, STATE_UNAVAILABLE): + if self.expose_default is None: + return None value = self.expose_default elif self.expose_attribute is not None: _attr = state.attributes.get(self.expose_attribute) @@ -156,12 +156,22 @@ 
class KNXExposeSensor: if value is not None and ( isinstance(self.device.sensor_value, RemoteValueSensor) ): - if issubclass(self.device.sensor_value.dpt_class, DPTNumeric): - return float(value) - if issubclass(self.device.sensor_value.dpt_class, DPTString): - # DPT 16.000 only allows up to 14 Bytes - return str(value)[:14] - return value + try: + if issubclass(self.device.sensor_value.dpt_class, DPTNumeric): + return float(value) + if issubclass(self.device.sensor_value.dpt_class, DPTString): + # DPT 16.000 only allows up to 14 Bytes + return str(value)[:14] + except (ValueError, TypeError) as err: + _LOGGER.warning( + 'Could not expose %s %s value "%s" to KNX: Conversion failed: %s', + self.entity_id, + self.expose_attribute or "state", + value, + err, + ) + return None + return value # type: ignore[no-any-return] async def _async_entity_changed(self, event: Event[EventStateChangedData]) -> None: """Handle entity change.""" diff --git a/homeassistant/components/knx/icons.json b/homeassistant/components/knx/icons.json index 736923375ee..756b6ab9f9e 100644 --- a/homeassistant/components/knx/icons.json +++ b/homeassistant/components/knx/icons.json @@ -1,5 +1,19 @@ { "entity": { + "climate": { + "knx_climate": { + "state_attributes": { + "preset_mode": { + "state": { + "comfort": "mdi:sofa", + "standby": "mdi:home-export-outline", + "economy": "mdi:leaf", + "building_protection": "mdi:sun-snowflake-variant" + } + } + } + } + }, "sensor": { "individual_address": { "default": "mdi:router-network" @@ -22,10 +36,20 @@ } }, "services": { - "send": "mdi:email-arrow-right", - "read": "mdi:email-search", - "event_register": "mdi:home-import-outline", - "exposure_register": "mdi:home-export-outline", - "reload": "mdi:reload" + "send": { + "service": "mdi:email-arrow-right" + }, + "read": { + "service": "mdi:email-search" + }, + "event_register": { + "service": "mdi:home-import-outline" + }, + "exposure_register": { + "service": "mdi:home-export-outline" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/knx/manifest.json b/homeassistant/components/knx/manifest.json index b7efd14fa2a..181dca6f4b8 100644 --- a/homeassistant/components/knx/manifest.json +++ b/homeassistant/components/knx/manifest.json @@ -13,7 +13,7 @@ "requirements": [ "xknx==3.1.1", "xknxproject==3.7.1", - "knx-frontend==2024.8.9.225351" + "knx-frontend==2024.9.4.64538" ], "single_config_entry": true } diff --git a/homeassistant/components/knx/project.py b/homeassistant/components/knx/project.py index b5bafe00724..04cac68aab0 100644 --- a/homeassistant/components/knx/project.py +++ b/homeassistant/components/knx/project.py @@ -8,12 +8,13 @@ from typing import Final from xknx import XKNX from xknx.dpt import DPTBase -from xknx.telegram.address import DeviceAddressableType +from xknx.telegram.address import DeviceAddressableType, GroupAddress, GroupAddressType from xknxproject import XKNXProj from xknxproject.models import ( Device, DPTType, GroupAddress as GroupAddressModel, + GroupAddressStyle as XknxProjectGroupAddressStyle, KNXProject as KNXProjectModel, ProjectInfo, ) @@ -90,6 +91,7 @@ class KNXProject: if project := data or await self._store.async_load(): self.devices = project["devices"] self.info = project["info"] + GroupAddress.address_format = self.get_address_format() xknx.group_address_dpt.clear() xknx_ga_dict: dict[DeviceAddressableType, DPTType] = {} @@ -133,3 +135,13 @@ class KNXProject: async def get_knxproject(self) -> KNXProjectModel | None: """Load the project file from local 
storage.""" return await self._store.async_load() + + def get_address_format(self) -> GroupAddressType: + """Return the address format for group addresses used in the project.""" + if self.info: + match self.info["group_address_style"]: + case XknxProjectGroupAddressStyle.TWOLEVEL.value: + return GroupAddressType.SHORT + case XknxProjectGroupAddressStyle.FREE.value: + return GroupAddressType.FREE + return GroupAddressType.LONG diff --git a/homeassistant/components/knx/strings.json b/homeassistant/components/knx/strings.json index d6e1e2f49f0..8d8692f6b7a 100644 --- a/homeassistant/components/knx/strings.json +++ b/homeassistant/components/knx/strings.json @@ -267,6 +267,22 @@ } }, "entity": { + "climate": { + "knx_climate": { + "state_attributes": { + "preset_mode": { + "name": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::name%]", + "state": { + "auto": "Auto", + "comfort": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::comfort%]", + "standby": "Standby", + "economy": "[%key:component::climate::entity_component::_::state_attributes::preset_mode::state::eco%]", + "building_protection": "Building protection" + } + } + } + } + }, "sensor": { "individual_address": { "name": "[%key:component::knx::config::step::routing::data::individual_address%]" diff --git a/homeassistant/components/knx/telegrams.py b/homeassistant/components/knx/telegrams.py index a96d841a07d..f4b31fd11f9 100644 --- a/homeassistant/components/knx/telegrams.py +++ b/homeassistant/components/knx/telegrams.py @@ -9,7 +9,7 @@ from xknx import XKNX from xknx.dpt import DPTArray, DPTBase, DPTBinary from xknx.dpt.dpt import DPTComplexData, DPTEnumData from xknx.exceptions import XKNXException -from xknx.telegram import Telegram +from xknx.telegram import Telegram, TelegramDirection from xknx.telegram.apci import GroupValueResponse, GroupValueWrite from homeassistant.core import HomeAssistant @@ -119,6 +119,8 @@ class Telegrams: device := self.project.devices.get(f"{telegram.source_address}") ) is not None: src_name = f"{device['manufacturer_name']} {device['name']}" + elif telegram.direction is TelegramDirection.OUTGOING: + src_name = "Home Assistant" if isinstance(telegram.payload, (GroupValueWrite, GroupValueResponse)): payload_data = telegram.payload.value.value diff --git a/homeassistant/components/knx/websocket.py b/homeassistant/components/knx/websocket.py index 4af3012741a..5c21a941484 100644 --- a/homeassistant/components/knx/websocket.py +++ b/homeassistant/components/knx/websocket.py @@ -2,7 +2,10 @@ from __future__ import annotations -from typing import TYPE_CHECKING, Final +import asyncio +from collections.abc import Awaitable, Callable +from functools import wraps +from typing import TYPE_CHECKING, Any, Final, overload import knx_frontend as knx_panel import voluptuous as vol @@ -77,21 +80,92 @@ async def register_panel(hass: HomeAssistant) -> None: ) +type KnxWebSocketCommandHandler = Callable[ + [HomeAssistant, KNXModule, websocket_api.ActiveConnection, dict[str, Any]], None +] +type KnxAsyncWebSocketCommandHandler = Callable[ + [HomeAssistant, KNXModule, websocket_api.ActiveConnection, dict[str, Any]], + Awaitable[None], +] + + +@overload +def provide_knx( + func: KnxAsyncWebSocketCommandHandler, +) -> websocket_api.const.AsyncWebSocketCommandHandler: ... +@overload +def provide_knx( + func: KnxWebSocketCommandHandler, +) -> websocket_api.const.WebSocketCommandHandler: ... 
+ + +def provide_knx( + func: KnxAsyncWebSocketCommandHandler | KnxWebSocketCommandHandler, +) -> ( + websocket_api.const.AsyncWebSocketCommandHandler + | websocket_api.const.WebSocketCommandHandler +): + """Websocket decorator to provide a KNXModule instance.""" + + def _send_not_loaded_error( + connection: websocket_api.ActiveConnection, msg_id: int + ) -> None: + connection.send_error( + msg_id, + websocket_api.const.ERR_HOME_ASSISTANT_ERROR, + "KNX integration not loaded.", + ) + + if asyncio.iscoroutinefunction(func): + + @wraps(func) + async def with_knx( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: + """Add KNX Module to call function.""" + try: + knx: KNXModule = hass.data[DOMAIN] + except KeyError: + _send_not_loaded_error(connection, msg["id"]) + return + await func(hass, knx, connection, msg) + + else: + + @wraps(func) + def with_knx( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: + """Add KNX Module to call function.""" + try: + knx: KNXModule = hass.data[DOMAIN] + except KeyError: + _send_not_loaded_error(connection, msg["id"]) + return + func(hass, knx, connection, msg) + + return with_knx + + @websocket_api.require_admin @websocket_api.websocket_command( { vol.Required("type"): "knx/info", } ) +@provide_knx @callback def ws_info( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command.""" - knx: KNXModule = hass.data[DOMAIN] - _project_info = None if project_info := knx.project.info: _project_info = { @@ -119,13 +193,14 @@ def ws_info( } ) @websocket_api.async_response +@provide_knx async def ws_get_knx_project( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get KNX project.""" - knx: KNXModule = hass.data[DOMAIN] knxproject = await knx.project.get_knxproject() connection.send_result( msg["id"], @@ -145,13 +220,14 @@ async def ws_get_knx_project( } ) @websocket_api.async_response +@provide_knx async def ws_project_file_process( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command.""" - knx: KNXModule = hass.data[DOMAIN] try: await knx.project.process_project_file( xknx=knx.xknx, @@ -175,13 +251,14 @@ async def ws_project_file_process( } ) @websocket_api.async_response +@provide_knx async def ws_project_file_remove( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command.""" - knx: KNXModule = hass.data[DOMAIN] await knx.project.remove_project_file() connection.send_result(msg["id"]) @@ -192,14 +269,15 @@ async def ws_project_file_remove( vol.Required("type"): "knx/group_monitor_info", } ) +@provide_knx @callback def ws_group_monitor_info( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Handle get info command of group monitor.""" - knx: KNXModule = hass.data[DOMAIN] recent_telegrams = [*knx.telegrams.recent_telegrams] connection.send_result( msg["id"], @@ -272,8 +350,10 @@ def ws_validate_entity( } ) @websocket_api.async_response +@provide_knx async def ws_create_entity( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: @@ -283,7 +363,6 @@ async def ws_create_entity( except EntityStoreValidationException as exc: connection.send_result(msg["id"], 
exc.validation_error) return - knx: KNXModule = hass.data[DOMAIN] try: entity_id = await knx.config_store.create_entity( # use validation result so defaults are applied @@ -308,8 +387,10 @@ async def ws_create_entity( } ) @websocket_api.async_response +@provide_knx async def ws_update_entity( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: @@ -319,7 +400,6 @@ async def ws_update_entity( except EntityStoreValidationException as exc: connection.send_result(msg["id"], exc.validation_error) return - knx: KNXModule = hass.data[DOMAIN] try: await knx.config_store.update_entity( validated_data[CONF_PLATFORM], @@ -344,13 +424,14 @@ async def ws_update_entity( } ) @websocket_api.async_response +@provide_knx async def ws_delete_entity( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Delete entity from entity store and remove it.""" - knx: KNXModule = hass.data[DOMAIN] try: await knx.config_store.delete_entity(msg[CONF_ENTITY_ID]) except ConfigStoreException as err: @@ -367,14 +448,15 @@ async def ws_delete_entity( vol.Required("type"): "knx/get_entity_entries", } ) +@provide_knx @callback def ws_get_entity_entries( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Get entities configured from entity store.""" - knx: KNXModule = hass.data[DOMAIN] entity_entries = [ entry.extended_dict for entry in knx.config_store.get_entity_entries() ] @@ -388,14 +470,15 @@ def ws_get_entity_entries( vol.Required(CONF_ENTITY_ID): str, } ) +@provide_knx @callback def ws_get_entity_config( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Get entity configuration from entity store.""" - knx: KNXModule = hass.data[DOMAIN] try: config_info = knx.config_store.get_entity_config(msg[CONF_ENTITY_ID]) except ConfigStoreException as err: @@ -414,14 +497,15 @@ def ws_get_entity_config( vol.Optional("area_id"): str, } ) +@provide_knx @callback def ws_create_device( hass: HomeAssistant, + knx: KNXModule, connection: websocket_api.ActiveConnection, msg: dict, ) -> None: """Create a new KNX device.""" - knx: KNXModule = hass.data[DOMAIN] identifier = f"knx_vdev_{ulid_now()}" device_registry = dr.async_get(hass) _device = device_registry.async_get_or_create( diff --git a/homeassistant/components/kodi/config_flow.py b/homeassistant/components/kodi/config_flow.py index e431c72d21e..26b5214c733 100644 --- a/homeassistant/components/kodi/config_flow.py +++ b/homeassistant/components/kodi/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from pykodi import CannotConnectError, InvalidAuthError, Kodi, get_kodi_connection import voluptuous as vol @@ -149,7 +150,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return self._create_entry() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} @@ -223,12 +226,12 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_ws_port_form(errors) - async def async_step_import(self, data): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from YAML.""" reason = None try: - await validate_http(self.hass, data) - await validate_ws(self.hass, data) + await validate_http(self.hass, import_data) + await 
validate_ws(self.hass, import_data) except InvalidAuth: _LOGGER.exception("Invalid Kodi credentials") reason = "invalid_auth" @@ -239,7 +242,9 @@ class KodiConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.exception("Unexpected exception") reason = "unknown" else: - return self.async_create_entry(title=data[CONF_NAME], data=data) + return self.async_create_entry( + title=import_data[CONF_NAME], data=import_data + ) return self.async_abort(reason=reason) diff --git a/homeassistant/components/kodi/icons.json b/homeassistant/components/kodi/icons.json index 07bd246e92d..d9c32630961 100644 --- a/homeassistant/components/kodi/icons.json +++ b/homeassistant/components/kodi/icons.json @@ -1,6 +1,10 @@ { "services": { - "add_to_playlist": "mdi:playlist-plus", - "call_method": "mdi:console" + "add_to_playlist": { + "service": "mdi:playlist-plus" + }, + "call_method": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/konnected/config_flow.py b/homeassistant/components/konnected/config_flow.py index 29f4fbe2a49..48016cd066a 100644 --- a/homeassistant/components/konnected/config_flow.py +++ b/homeassistant/components/konnected/config_flow.py @@ -202,24 +202,24 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): random.choices(f"{string.ascii_uppercase}{string.digits}", k=20) ) - async def async_step_import(self, device_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a configuration.yaml config. This flow is triggered by `async_setup` for configured panels. """ - _LOGGER.debug(device_config) + _LOGGER.debug(import_data) # save the data and confirm connection via user step - await self.async_set_unique_id(device_config["id"]) - self.options = device_config[CONF_DEFAULT_OPTIONS] + await self.async_set_unique_id(import_data["id"]) + self.options = import_data[CONF_DEFAULT_OPTIONS] # config schema ensures we have port if we have host - if device_config.get(CONF_HOST): + if import_data.get(CONF_HOST): # automatically connect if we have host info return await self.async_step_user( user_input={ - CONF_HOST: device_config[CONF_HOST], - CONF_PORT: device_config[CONF_PORT], + CONF_HOST: import_data[CONF_HOST], + CONF_PORT: import_data[CONF_PORT], } ) @@ -303,7 +303,9 @@ class KonnectedFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="unknown") - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Connect to panel and get config.""" errors = {} if user_input: diff --git a/homeassistant/components/konnected/switch.py b/homeassistant/components/konnected/switch.py index 424a2d9164d..65b99d623f1 100644 --- a/homeassistant/components/konnected/switch.py +++ b/homeassistant/components/konnected/switch.py @@ -102,13 +102,12 @@ class KonnectedSwitch(SwitchEntity): if resp.get(ATTR_STATE) is not None: self._set_state(self._boolean_state(resp.get(ATTR_STATE))) - def _boolean_state(self, int_state): - if int_state is None: - return False + def _boolean_state(self, int_state: int | None) -> bool | None: if int_state == 0: return self._activation == STATE_LOW if int_state == 1: return self._activation == STATE_HIGH + return None def _set_state(self, state): self._attr_is_on = state diff --git a/homeassistant/components/kostal_plenticore/config_flow.py b/homeassistant/components/kostal_plenticore/config_flow.py index 547afa9d71b..59c737a0874 100644 --- a/homeassistant/components/kostal_plenticore/config_flow.py +++ 
b/homeassistant/components/kostal_plenticore/config_flow.py @@ -1,12 +1,13 @@ """Config flow for Kostal Plenticore Solar Inverter integration.""" import logging +from typing import Any from aiohttp.client_exceptions import ClientError from pykoplenti import ApiClient, AuthenticationException import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_BASE, CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -44,10 +45,11 @@ class KostalPlenticoreConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} - hostname = None if user_input is not None: self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) @@ -62,8 +64,7 @@ class KostalPlenticoreConfigFlow(ConfigFlow, domain=DOMAIN): except Exception: _LOGGER.exception("Unexpected exception") errors[CONF_BASE] = "unknown" - - if not errors: + else: return self.async_create_entry(title=hostname, data=user_input) return self.async_show_form( diff --git a/homeassistant/components/lamarzocco/__init__.py b/homeassistant/components/lamarzocco/__init__.py index dfcaa54047d..02e47ecd78e 100644 --- a/homeassistant/components/lamarzocco/__init__.py +++ b/homeassistant/components/lamarzocco/__init__.py @@ -53,6 +53,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: LaMarzoccoConfigEntry) - cloud_client = LaMarzoccoCloudClient( username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD], + client=get_async_client(hass), ) # initialize local API diff --git a/homeassistant/components/lamarzocco/manifest.json b/homeassistant/components/lamarzocco/manifest.json index 73d14250525..181a2b9ab9b 100644 --- a/homeassistant/components/lamarzocco/manifest.json +++ b/homeassistant/components/lamarzocco/manifest.json @@ -22,5 +22,5 @@ "integration_type": "device", "iot_class": "cloud_polling", "loggers": ["lmcloud"], - "requirements": ["lmcloud==1.1.13"] + "requirements": ["lmcloud==1.2.2"] } diff --git a/homeassistant/components/lametric/__init__.py b/homeassistant/components/lametric/__init__.py index 10fdee0ddc7..779cfa10445 100644 --- a/homeassistant/components/lametric/__init__.py +++ b/homeassistant/components/lametric/__init__.py @@ -12,7 +12,7 @@ from .const import DOMAIN, PLATFORMS from .coordinator import LaMetricDataUpdateCoordinator from .services import async_setup_services -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: diff --git a/homeassistant/components/lametric/icons.json b/homeassistant/components/lametric/icons.json index 7e1841272cf..229770c96dc 100644 --- a/homeassistant/components/lametric/icons.json +++ b/homeassistant/components/lametric/icons.json @@ -39,7 +39,11 @@ } }, "services": { - "chart": "mdi:chart-areaspline-variant", - "message": "mdi:message" + "chart": { + "service": "mdi:chart-areaspline-variant" + }, + "message": { + "service": "mdi:message" + } } } diff --git a/homeassistant/components/lawn_mower/__init__.py b/homeassistant/components/lawn_mower/__init__.py index 27765d207d8..9eef6ad8343 100644 --- a/homeassistant/components/lawn_mower/__init__.py +++ 
b/homeassistant/components/lawn_mower/__init__.py @@ -39,15 +39,15 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_START_MOWING, - {}, + None, "async_start_mowing", [LawnMowerEntityFeature.START_MOWING], ) component.async_register_entity_service( - SERVICE_PAUSE, {}, "async_pause", [LawnMowerEntityFeature.PAUSE] + SERVICE_PAUSE, None, "async_pause", [LawnMowerEntityFeature.PAUSE] ) component.async_register_entity_service( - SERVICE_DOCK, {}, "async_dock", [LawnMowerEntityFeature.DOCK] + SERVICE_DOCK, None, "async_dock", [LawnMowerEntityFeature.DOCK] ) return True diff --git a/homeassistant/components/lawn_mower/const.py b/homeassistant/components/lawn_mower/const.py index e060abe6423..231be83ed88 100644 --- a/homeassistant/components/lawn_mower/const.py +++ b/homeassistant/components/lawn_mower/const.py @@ -18,6 +18,9 @@ class LawnMowerActivity(StrEnum): DOCKED = "docked" """Device is docked.""" + RETURNING = "returning" + """Device is returning.""" + class LawnMowerEntityFeature(IntFlag): """Supported features of the lawn mower entity.""" diff --git a/homeassistant/components/lawn_mower/icons.json b/homeassistant/components/lawn_mower/icons.json index b25bf927fcd..2fa1f79efa1 100644 --- a/homeassistant/components/lawn_mower/icons.json +++ b/homeassistant/components/lawn_mower/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "dock": "mdi:home-import-outline", - "pause": "mdi:pause", - "start_mowing": "mdi:play" + "dock": { + "service": "mdi:home-import-outline" + }, + "pause": { + "service": "mdi:pause" + }, + "start_mowing": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/lawn_mower/strings.json b/homeassistant/components/lawn_mower/strings.json index 15ed50ca6c5..ebaea4ffd6a 100644 --- a/homeassistant/components/lawn_mower/strings.json +++ b/homeassistant/components/lawn_mower/strings.json @@ -7,7 +7,8 @@ "error": "Error", "paused": "[%key:common::state::paused%]", "mowing": "Mowing", - "docked": "Docked" + "docked": "Docked", + "returning": "Returning" } } }, diff --git a/homeassistant/components/lcn/__init__.py b/homeassistant/components/lcn/__init__.py index 6866a10d55e..75f417cb3a5 100644 --- a/homeassistant/components/lcn/__init__.py +++ b/homeassistant/components/lcn/__init__.py @@ -27,6 +27,7 @@ from homeassistant.helpers.entity import Entity from homeassistant.helpers.typing import ConfigType from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DIM_MODE, CONF_DOMAIN_DATA, CONF_SK_NUM_TRIES, @@ -47,6 +48,7 @@ from .helpers import ( ) from .schemas import CONFIG_SCHEMA # noqa: F401 from .services import SERVICES +from .websocket import register_panel_and_ws_api _LOGGER = logging.getLogger(__name__) @@ -115,6 +117,7 @@ async def async_setup_entry( _LOGGER.debug('LCN connected to "%s"', config_entry.title) hass.data[DOMAIN][config_entry.entry_id] = { CONNECTION: lcn_connection, + ADD_ENTITIES_CALLBACKS: {}, } # Update config_entry with LCN device serials await async_update_config_entry(hass, config_entry) @@ -140,6 +143,8 @@ async def async_setup_entry( DOMAIN, service_name, service(hass).async_call_service, service.schema ) + await register_panel_and_ws_api(hass) + return True diff --git a/homeassistant/components/lcn/binary_sensor.py b/homeassistant/components/lcn/binary_sensor.py index 35836e4653e..a0f8e1cf360 100644 --- a/homeassistant/components/lcn/binary_sensor.py +++ b/homeassistant/components/lcn/binary_sensor.py @@ -1,6 +1,7 @@ """Support for LCN binary sensors.""" -from 
__future__ import annotations +from collections.abc import Iterable +from functools import partial import pypck @@ -15,26 +16,47 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import LcnEntity -from .const import BINSENSOR_PORTS, CONF_DOMAIN_DATA, SETPOINTS +from .const import ( + ADD_ENTITIES_CALLBACKS, + BINSENSOR_PORTS, + CONF_DOMAIN_DATA, + DOMAIN, + SETPOINTS, +) from .helpers import DeviceConnectionType, InputType, get_device_connection -def create_lcn_binary_sensor_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - - if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in SETPOINTS: - return LcnRegulatorLockSensor( - entity_config, config_entry.entry_id, device_connection +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnRegulatorLockSensor | LcnBinarySensor | LcnLockKeysSensor] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry ) - if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in BINSENSOR_PORTS: - return LcnBinarySensor(entity_config, config_entry.entry_id, device_connection) - # in KEY - return LcnLockKeysSensor(entity_config, config_entry.entry_id, device_connection) + + if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in SETPOINTS: + entities.append( + LcnRegulatorLockSensor( + entity_config, config_entry.entry_id, device_connection + ) + ) + elif entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in BINSENSOR_PORTS: + entities.append( + LcnBinarySensor(entity_config, config_entry.entry_id, device_connection) + ) + else: # in KEY + entities.append( + LcnLockKeysSensor( + entity_config, config_entry.entry_id, device_connection + ) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -43,11 +65,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, + ) - async_add_entities( - create_lcn_binary_sensor_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_BINARY_SENSOR + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_BINARY_SENSOR: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_BINARY_SENSOR + ), ) diff --git a/homeassistant/components/lcn/climate.py b/homeassistant/components/lcn/climate.py index c03061618f7..0142894a16b 100644 --- a/homeassistant/components/lcn/climate.py +++ b/homeassistant/components/lcn/climate.py @@ -1,7 +1,7 @@ """Support for LCN climate control.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any, cast import pypck @@ -28,26 +28,37 @@ from homeassistant.helpers.typing import ConfigType from . 
import LcnEntity from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, CONF_LOCKABLE, CONF_MAX_TEMP, CONF_MIN_TEMP, CONF_SETPOINT, + DOMAIN, ) from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 -def create_lcn_climate_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnClimate] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - return LcnClimate(entity_config, config_entry.entry_id, device_connection) + entities.append( + LcnClimate(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -56,11 +67,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, + ) - async_add_entities( - create_lcn_climate_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_CLIMATE + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_CLIMATE: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_CLIMATE + ), ) diff --git a/homeassistant/components/lcn/config_flow.py b/homeassistant/components/lcn/config_flow.py index d05eb896f27..c38a16cc21e 100644 --- a/homeassistant/components/lcn/config_flow.py +++ b/homeassistant/components/lcn/config_flow.py @@ -3,32 +3,51 @@ from __future__ import annotations import logging +from typing import Any import pypck +import voluptuous as vol -from homeassistant.config_entries import ( - SOURCE_IMPORT, - ConfigEntry, - ConfigFlow, - ConfigFlowResult, -) +from homeassistant import config_entries +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import ( + CONF_BASE, + CONF_DEVICES, + CONF_ENTITIES, CONF_HOST, CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType -from .const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DOMAIN +from .const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DIM_MODES, DOMAIN from .helpers import purge_device_registry, purge_entity_registry _LOGGER = logging.getLogger(__name__) +CONFIG_DATA = { + vol.Required(CONF_IP_ADDRESS, default=""): str, + vol.Required(CONF_PORT, default=4114): cv.positive_int, + vol.Required(CONF_USERNAME, default=""): str, + vol.Required(CONF_PASSWORD, default=""): str, + vol.Required(CONF_SK_NUM_TRIES, default=0): cv.positive_int, + vol.Required(CONF_DIM_MODE, default="STEPS200"): vol.In(DIM_MODES), +} -def get_config_entry(hass: HomeAssistant, data: ConfigType) -> ConfigEntry 
| None: +USER_DATA = {vol.Required(CONF_HOST, default="pchk"): str, **CONFIG_DATA} + +CONFIG_SCHEMA = vol.Schema(CONFIG_DATA) +USER_SCHEMA = vol.Schema(USER_DATA) + + +def get_config_entry( + hass: HomeAssistant, data: ConfigType +) -> config_entries.ConfigEntry | None: """Check config entries for already configured entries based on the ip address/port.""" return next( ( @@ -41,8 +60,10 @@ def get_config_entry(hass: HomeAssistant, data: ConfigType) -> ConfigEntry | Non ) -async def validate_connection(host_name: str, data: ConfigType) -> ConfigType: +async def validate_connection(data: ConfigType) -> str | None: """Validate if a connection to LCN can be established.""" + error = None + host_name = data[CONF_HOST] host = data[CONF_IP_ADDRESS] port = data[CONF_PORT] username = data[CONF_USERNAME] @@ -61,49 +82,137 @@ async def validate_connection(host_name: str, data: ConfigType) -> ConfigType: host, port, username, password, settings=settings ) - await connection.async_connect(timeout=5) + try: + await connection.async_connect(timeout=5) + _LOGGER.debug("LCN connection validated") + except pypck.connection.PchkAuthenticationError: + _LOGGER.warning('Authentication on PCHK "%s" failed', host_name) + error = "authentication_error" + except pypck.connection.PchkLicenseError: + _LOGGER.warning( + 'Maximum number of connections on PCHK "%s" was ' + "reached. An additional license key is required", + host_name, + ) + error = "license_error" + except (TimeoutError, ConnectionRefusedError): + _LOGGER.warning('Connection to PCHK "%s" failed', host_name) + error = "connection_refused" - _LOGGER.debug("LCN connection validated") await connection.async_close() - return data + return error -class LcnFlowHandler(ConfigFlow, domain=DOMAIN): +class LcnFlowHandler(config_entries.ConfigFlow, domain=DOMAIN): """Handle a LCN config flow.""" VERSION = 1 - async def async_step_import(self, data: ConfigType) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import existing configuration from LCN.""" - host_name = data[CONF_HOST] # validate the imported connection parameters - try: - await validate_connection(host_name, data) - except pypck.connection.PchkAuthenticationError: - _LOGGER.warning('Authentication on PCHK "%s" failed', host_name) - return self.async_abort(reason="authentication_error") - except pypck.connection.PchkLicenseError: - _LOGGER.warning( - ( - 'Maximum number of connections on PCHK "%s" was ' - "reached. 
An additional license key is required" - ), - host_name, + if error := await validate_connection(import_data): + async_create_issue( + self.hass, + DOMAIN, + error, + is_fixable=False, + issue_domain=DOMAIN, + severity=IssueSeverity.ERROR, + translation_key=error, + translation_placeholders={ + "url": "/config/integrations/dashboard/add?domain=lcn" + }, ) - return self.async_abort(reason="license_error") - except TimeoutError: - _LOGGER.warning('Connection to PCHK "%s" failed', host_name) - return self.async_abort(reason="connection_timeout") + return self.async_abort(reason=error) + + async_create_issue( + self.hass, + HOMEASSISTANT_DOMAIN, + f"deprecated_yaml_{DOMAIN}", + breaks_in_ha_version="2024.12.0", + is_fixable=False, + is_persistent=False, + issue_domain=DOMAIN, + severity=IssueSeverity.WARNING, + translation_key="deprecated_yaml", + translation_placeholders={ + "domain": DOMAIN, + "integration_title": "LCN", + }, + ) # check if we already have a host with the same address configured - if entry := get_config_entry(self.hass, data): - entry.source = SOURCE_IMPORT + if entry := get_config_entry(self.hass, import_data): + entry.source = config_entries.SOURCE_IMPORT # Cleanup entity and device registry, if we imported from configuration.yaml to # remove orphans when entities were removed from configuration - purge_entity_registry(self.hass, entry.entry_id, data) - purge_device_registry(self.hass, entry.entry_id, data) + purge_entity_registry(self.hass, entry.entry_id, import_data) + purge_device_registry(self.hass, entry.entry_id, import_data) - self.hass.config_entries.async_update_entry(entry, data=data) + self.hass.config_entries.async_update_entry(entry, data=import_data) return self.async_abort(reason="existing_configuration_updated") - return self.async_create_entry(title=f"{host_name}", data=data) + return self.async_create_entry( + title=f"{import_data[CONF_HOST]}", data=import_data + ) + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> config_entries.ConfigFlowResult: + """Handle a flow initiated by the user.""" + if user_input is None: + return self.async_show_form(step_id="user", data_schema=USER_SCHEMA) + + errors = None + if get_config_entry(self.hass, user_input): + errors = {CONF_BASE: "already_configured"} + elif (error := await validate_connection(user_input)) is not None: + errors = {CONF_BASE: error} + + if errors is not None: + return self.async_show_form( + step_id="user", + data_schema=self.add_suggested_values_to_schema( + USER_SCHEMA, user_input + ), + errors=errors, + ) + + data: dict = { + **user_input, + CONF_DEVICES: [], + CONF_ENTITIES: [], + } + + return self.async_create_entry(title=data[CONF_HOST], data=data) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> config_entries.ConfigFlowResult: + """Reconfigure LCN configuration.""" + errors = None + entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + assert entry + + if user_input is not None: + user_input[CONF_HOST] = entry.data[CONF_HOST] + + await self.hass.config_entries.async_unload(entry.entry_id) + if (error := await validate_connection(user_input)) is not None: + errors = {CONF_BASE: error} + + if errors is None: + data = entry.data.copy() + data.update(user_input) + self.hass.config_entries.async_update_entry(entry, data=data) + await self.hass.config_entries.async_setup(entry.entry_id) + return self.async_abort(reason="reconfigure_successful") + + await 
self.hass.config_entries.async_setup(entry.entry_id) + + return self.async_show_form( + step_id="reconfigure", + data_schema=self.add_suggested_values_to_schema(CONFIG_SCHEMA, entry.data), + errors=errors or {}, + ) diff --git a/homeassistant/components/lcn/const.py b/homeassistant/components/lcn/const.py index bcf9ecdf295..24d2e68495c 100644 --- a/homeassistant/components/lcn/const.py +++ b/homeassistant/components/lcn/const.py @@ -18,6 +18,7 @@ DOMAIN = "lcn" DATA_LCN = "lcn" DEFAULT_NAME = "pchk" +ADD_ENTITIES_CALLBACKS = "add_entities_callbacks" CONNECTION = "connection" CONF_HARDWARE_SERIAL = "hardware_serial" CONF_SOFTWARE_SERIAL = "software_serial" diff --git a/homeassistant/components/lcn/cover.py b/homeassistant/components/lcn/cover.py index edc60a202a1..1e428a350d6 100644 --- a/homeassistant/components/lcn/cover.py +++ b/homeassistant/components/lcn/cover.py @@ -1,7 +1,7 @@ """Support for LCN covers.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any import pypck @@ -14,24 +14,41 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . import LcnEntity -from .const import CONF_DOMAIN_DATA, CONF_MOTOR, CONF_REVERSE_TIME +from .const import ( + ADD_ENTITIES_CALLBACKS, + CONF_DOMAIN_DATA, + CONF_MOTOR, + CONF_REVERSE_TIME, + DOMAIN, +) from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 -def create_lcn_cover_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnOutputsCover | LcnRelayCover] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - if entity_config[CONF_DOMAIN_DATA][CONF_MOTOR] in "OUTPUTS": - return LcnOutputsCover(entity_config, config_entry.entry_id, device_connection) - # in RELAYS - return LcnRelayCover(entity_config, config_entry.entry_id, device_connection) + if entity_config[CONF_DOMAIN_DATA][CONF_MOTOR] in "OUTPUTS": + entities.append( + LcnOutputsCover(entity_config, config_entry.entry_id, device_connection) + ) + else: # in RELAYS + entities.append( + LcnRelayCover(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -40,11 +57,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN cover entities from a config entry.""" + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, + ) - async_add_entities( - create_lcn_cover_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_COVER + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_COVER: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_COVER + ), ) diff --git a/homeassistant/components/lcn/helpers.py b/homeassistant/components/lcn/helpers.py 
index d46628fc6da..fd8c59ad46f 100644 --- a/homeassistant/components/lcn/helpers.py +++ b/homeassistant/components/lcn/helpers.py @@ -423,6 +423,16 @@ async def async_update_config_entry( hass.config_entries.async_update_entry(config_entry, data=new_data) +def get_device_config( + address: AddressType, config_entry: ConfigEntry +) -> ConfigType | None: + """Return the device configuration for given address and ConfigEntry.""" + for device_config in config_entry.data[CONF_DEVICES]: + if tuple(device_config[CONF_ADDRESS]) == address: + return cast(ConfigType, device_config) + return None + + def has_unique_host_names(hosts: list[ConfigType]) -> list[ConfigType]: """Validate that all connection names are unique. diff --git a/homeassistant/components/lcn/icons.json b/homeassistant/components/lcn/icons.json index c8b451a79ea..944c3938a92 100644 --- a/homeassistant/components/lcn/icons.json +++ b/homeassistant/components/lcn/icons.json @@ -1,17 +1,43 @@ { "services": { - "output_abs": "mdi:brightness-auto", - "output_rel": "mdi:brightness-7", - "output_toggle": "mdi:toggle-switch", - "relays": "mdi:light-switch-off", - "led": "mdi:led-on", - "var_abs": "mdi:wrench", - "var_reset": "mdi:reload", - "var_rel": "mdi:wrench", - "lock_regulator": "mdi:lock", - "send_keys": "mdi:alarm-panel", - "lock_keys": "mdi:lock", - "dyn_text": "mdi:form-textbox", - "pck": "mdi:package-variant-closed" + "output_abs": { + "service": "mdi:brightness-auto" + }, + "output_rel": { + "service": "mdi:brightness-7" + }, + "output_toggle": { + "service": "mdi:toggle-switch" + }, + "relays": { + "service": "mdi:light-switch-off" + }, + "led": { + "service": "mdi:led-on" + }, + "var_abs": { + "service": "mdi:wrench" + }, + "var_reset": { + "service": "mdi:reload" + }, + "var_rel": { + "service": "mdi:wrench" + }, + "lock_regulator": { + "service": "mdi:lock" + }, + "send_keys": { + "service": "mdi:alarm-panel" + }, + "lock_keys": { + "service": "mdi:lock" + }, + "dyn_text": { + "service": "mdi:form-textbox" + }, + "pck": { + "service": "mdi:package-variant-closed" + } } } diff --git a/homeassistant/components/lcn/light.py b/homeassistant/components/lcn/light.py index 584161a0829..799ed0036d8 100644 --- a/homeassistant/components/lcn/light.py +++ b/homeassistant/components/lcn/light.py @@ -1,7 +1,7 @@ """Support for LCN lights.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any import pypck @@ -22,10 +22,12 @@ from homeassistant.helpers.typing import ConfigType from . 
import LcnEntity from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DIMMABLE, CONF_DOMAIN_DATA, CONF_OUTPUT, CONF_TRANSITION, + DOMAIN, OUTPUT_PORTS, ) from .helpers import DeviceConnectionType, InputType, get_device_connection @@ -33,18 +35,29 @@ from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 -def create_lcn_light_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnOutputLight | LcnRelayLight] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: - return LcnOutputLight(entity_config, config_entry.entry_id, device_connection) - # in RELAY_PORTS - return LcnRelayLight(entity_config, config_entry.entry_id, device_connection) + if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: + entities.append( + LcnOutputLight(entity_config, config_entry.entry_id, device_connection) + ) + else: # in RELAY_PORTS + entities.append( + LcnRelayLight(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -53,11 +66,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN light entities from a config entry.""" + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, + ) - async_add_entities( - create_lcn_light_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_LIGHT + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_LIGHT: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_LIGHT + ), ) diff --git a/homeassistant/components/lcn/manifest.json b/homeassistant/components/lcn/manifest.json index 44a4d683c81..f8b7d02b103 100644 --- a/homeassistant/components/lcn/manifest.json +++ b/homeassistant/components/lcn/manifest.json @@ -1,10 +1,12 @@ { "domain": "lcn", "name": "LCN", + "after_dependencies": ["panel_custom"], "codeowners": ["@alengwenus"], - "config_flow": false, + "config_flow": true, + "dependencies": ["http", "websocket_api"], "documentation": "https://www.home-assistant.io/integrations/lcn", "iot_class": "local_push", "loggers": ["pypck"], - "requirements": ["pypck==0.7.20"] + "requirements": ["pypck==0.7.21", "lcn-frontend==0.1.6"] } diff --git a/homeassistant/components/lcn/scene.py b/homeassistant/components/lcn/scene.py index 7e476987c53..52ec0262b55 100644 --- a/homeassistant/components/lcn/scene.py +++ b/homeassistant/components/lcn/scene.py @@ -1,7 +1,7 @@ """Support for LCN scenes.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any import pypck @@ -15,10 +15,12 @@ from homeassistant.helpers.typing import ConfigType from . 
import LcnEntity from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, CONF_OUTPUTS, CONF_REGISTER, CONF_TRANSITION, + DOMAIN, OUTPUT_PORTS, ) from .helpers import DeviceConnectionType, get_device_connection @@ -26,15 +28,24 @@ from .helpers import DeviceConnectionType, get_device_connection PARALLEL_UPDATES = 0 -def create_lcn_scene_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnScene] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - return LcnScene(entity_config, config_entry.entry_id, device_connection) + entities.append( + LcnScene(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -43,11 +54,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, + ) - async_add_entities( - create_lcn_scene_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_SCENE + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_SCENE: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_SCENE + ), ) diff --git a/homeassistant/components/lcn/schemas.py b/homeassistant/components/lcn/schemas.py index 9927ea5908d..0539e83dea8 100644 --- a/homeassistant/components/lcn/schemas.py +++ b/homeassistant/components/lcn/schemas.py @@ -58,6 +58,8 @@ from .const import ( ) from .helpers import has_unique_host_names, is_address +ADDRESS_SCHEMA = vol.Coerce(tuple) + # # Domain data # @@ -169,23 +171,32 @@ CONNECTION_SCHEMA = vol.Schema( ) CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_CONNECTIONS): vol.All( - cv.ensure_list, has_unique_host_names, [CONNECTION_SCHEMA] - ), - vol.Optional(CONF_BINARY_SENSORS): vol.All( - cv.ensure_list, [BINARY_SENSORS_SCHEMA] - ), - vol.Optional(CONF_CLIMATES): vol.All(cv.ensure_list, [CLIMATES_SCHEMA]), - vol.Optional(CONF_COVERS): vol.All(cv.ensure_list, [COVERS_SCHEMA]), - vol.Optional(CONF_LIGHTS): vol.All(cv.ensure_list, [LIGHTS_SCHEMA]), - vol.Optional(CONF_SCENES): vol.All(cv.ensure_list, [SCENES_SCHEMA]), - vol.Optional(CONF_SENSORS): vol.All(cv.ensure_list, [SENSORS_SCHEMA]), - vol.Optional(CONF_SWITCHES): vol.All(cv.ensure_list, [SWITCHES_SCHEMA]), - } - ) - }, + vol.All( + cv.deprecated(DOMAIN), + { + DOMAIN: vol.Schema( + { + vol.Required(CONF_CONNECTIONS): vol.All( + cv.ensure_list, has_unique_host_names, [CONNECTION_SCHEMA] + ), + vol.Optional(CONF_BINARY_SENSORS): vol.All( + cv.ensure_list, [BINARY_SENSORS_SCHEMA] + ), + vol.Optional(CONF_CLIMATES): vol.All( + cv.ensure_list, [CLIMATES_SCHEMA] + ), + vol.Optional(CONF_COVERS): vol.All(cv.ensure_list, [COVERS_SCHEMA]), + vol.Optional(CONF_LIGHTS): vol.All(cv.ensure_list, [LIGHTS_SCHEMA]), + vol.Optional(CONF_SCENES): 
vol.All(cv.ensure_list, [SCENES_SCHEMA]), + vol.Optional(CONF_SENSORS): vol.All( + cv.ensure_list, [SENSORS_SCHEMA] + ), + vol.Optional(CONF_SWITCHES): vol.All( + cv.ensure_list, [SWITCHES_SCHEMA] + ), + }, + ) + }, + ), extra=vol.ALLOW_EXTRA, ) diff --git a/homeassistant/components/lcn/sensor.py b/homeassistant/components/lcn/sensor.py index 32b97ab8317..7e8941a0bf9 100644 --- a/homeassistant/components/lcn/sensor.py +++ b/homeassistant/components/lcn/sensor.py @@ -1,7 +1,7 @@ """Support for LCN sensors.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from itertools import chain from typing import cast @@ -22,7 +22,9 @@ from homeassistant.helpers.typing import ConfigType from . import LcnEntity from .const import ( + ADD_ENTITIES_CALLBACKS, CONF_DOMAIN_DATA, + DOMAIN, LED_PORTS, S0_INPUTS, SETPOINTS, @@ -32,22 +34,35 @@ from .const import ( from .helpers import DeviceConnectionType, InputType, get_device_connection -def create_lcn_sensor_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) - - if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in chain( - VARIABLES, SETPOINTS, THRESHOLDS, S0_INPUTS - ): - return LcnVariableSensor( - entity_config, config_entry.entry_id, device_connection +def add_lcn_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnVariableSensor | LcnLedLogicSensor] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry ) - # in LED_PORTS + LOGICOP_PORTS - return LcnLedLogicSensor(entity_config, config_entry.entry_id, device_connection) + + if entity_config[CONF_DOMAIN_DATA][CONF_SOURCE] in chain( + VARIABLES, SETPOINTS, THRESHOLDS, S0_INPUTS + ): + entities.append( + LcnVariableSensor( + entity_config, config_entry.entry_id, device_connection + ) + ) + else: # in LED_PORTS + LOGICOP_PORTS + entities.append( + LcnLedLogicSensor( + entity_config, config_entry.entry_id, device_connection + ) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -56,11 +71,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + add_entities = partial( + add_lcn_entities, + hass, + config_entry, + async_add_entities, + ) - async_add_entities( - create_lcn_sensor_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_SENSOR + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_SENSOR: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_SENSOR + ), ) diff --git a/homeassistant/components/lcn/strings.json b/homeassistant/components/lcn/strings.json index 3bab17cbbcd..a5f303c6392 100644 --- a/homeassistant/components/lcn/strings.json +++ b/homeassistant/components/lcn/strings.json @@ -14,6 +14,58 @@ "level": "Level" } }, + "config": { + "step": { + "user": { + "title": "Setup LCN host", + "description": "Set up new connection to LCN host.", + "data": { + "host": 
"[%key:common::config_flow::data::name%]", + "ip_address": "[%key:common::config_flow::data::ip%]", + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]", + "sk_num_tries": "Segment coupler scan attempts", + "dim_mode": "Dimming mode" + } + }, + "reconfigure": { + "title": "Reconfigure LCN host", + "description": "Reconfigure connection to LCN host.", + "data": { + "ip_address": "[%key:common::config_flow::data::ip%]", + "port": "[%key:common::config_flow::data::port%]", + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]", + "sk_num_tries": "Segment coupler scan attempts", + "dim_mode": "Dimming mode" + } + } + }, + "error": { + "authentication_error": "Authentication failed. Wrong username or password.", + "license_error": "Maximum number of connections was reached. An additional licence key is required.", + "connection_refused": "Unable to connect to PCHK. Check IP and port.", + "already_configured": "PCHK connection using the same ip address/port is already configured." + }, + "abort": { + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" + } + }, + "issues": { + "authentication_error": { + "title": "Authentication failed.", + "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure username and password are correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + }, + "license_error": { + "title": "Maximum number of connections was reached.", + "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure sufficient PCHK licenses are registered and restart Home Assistant.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + }, + "connection_refused": { + "title": "Unable to connect to PCHK.", + "description": "Configuring LCN using YAML is being removed but there was an error importing your YAML configuration.\n\nEnsure the connection (IP and port) to the LCN bus coupler is correct.\n\nConsider removing the LCN YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." + } + }, "services": { "output_abs": { "name": "Output absolute brightness", diff --git a/homeassistant/components/lcn/switch.py b/homeassistant/components/lcn/switch.py index b82394ced0d..4c316cef547 100644 --- a/homeassistant/components/lcn/switch.py +++ b/homeassistant/components/lcn/switch.py @@ -1,7 +1,7 @@ """Support for LCN switches.""" -from __future__ import annotations - +from collections.abc import Iterable +from functools import partial from typing import Any import pypck @@ -14,24 +14,41 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType from . 
import LcnEntity -from .const import CONF_DOMAIN_DATA, CONF_OUTPUT, OUTPUT_PORTS +from .const import ( + ADD_ENTITIES_CALLBACKS, + CONF_DOMAIN_DATA, + CONF_OUTPUT, + DOMAIN, + OUTPUT_PORTS, +) from .helpers import DeviceConnectionType, InputType, get_device_connection PARALLEL_UPDATES = 0 -def create_lcn_switch_entity( - hass: HomeAssistant, entity_config: ConfigType, config_entry: ConfigEntry -) -> LcnEntity: - """Set up an entity for this domain.""" - device_connection = get_device_connection( - hass, entity_config[CONF_ADDRESS], config_entry - ) +def add_lcn_switch_entities( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + entity_configs: Iterable[ConfigType], +) -> None: + """Add entities for this domain.""" + entities: list[LcnOutputSwitch | LcnRelaySwitch] = [] + for entity_config in entity_configs: + device_connection = get_device_connection( + hass, entity_config[CONF_ADDRESS], config_entry + ) - if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: - return LcnOutputSwitch(entity_config, config_entry.entry_id, device_connection) - # in RELAY_PORTS - return LcnRelaySwitch(entity_config, config_entry.entry_id, device_connection) + if entity_config[CONF_DOMAIN_DATA][CONF_OUTPUT] in OUTPUT_PORTS: + entities.append( + LcnOutputSwitch(entity_config, config_entry.entry_id, device_connection) + ) + else: # in RELAY_PORTS + entities.append( + LcnRelaySwitch(entity_config, config_entry.entry_id, device_connection) + ) + + async_add_entities(entities) async def async_setup_entry( @@ -40,11 +57,23 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up LCN switch entities from a config entry.""" + add_entities = partial( + add_lcn_switch_entities, + hass, + config_entry, + async_add_entities, + ) - async_add_entities( - create_lcn_switch_entity(hass, entity_config, config_entry) - for entity_config in config_entry.data[CONF_ENTITIES] - if entity_config[CONF_DOMAIN] == DOMAIN_SWITCH + hass.data[DOMAIN][config_entry.entry_id][ADD_ENTITIES_CALLBACKS].update( + {DOMAIN_SWITCH: add_entities} + ) + + add_entities( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if entity_config[CONF_DOMAIN] == DOMAIN_SWITCH + ), ) diff --git a/homeassistant/components/lcn/websocket.py b/homeassistant/components/lcn/websocket.py new file mode 100644 index 00000000000..b418e362b27 --- /dev/null +++ b/homeassistant/components/lcn/websocket.py @@ -0,0 +1,440 @@ +"""LCN Websocket API.""" + +from __future__ import annotations + +from collections.abc import Awaitable, Callable +from functools import wraps +from typing import TYPE_CHECKING, Any, Final + +import lcn_frontend as lcn_panel +import voluptuous as vol + +from homeassistant.components import panel_custom, websocket_api +from homeassistant.components.http import StaticPathConfig +from homeassistant.components.websocket_api import AsyncWebSocketCommandHandler +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_ADDRESS, + CONF_DEVICES, + CONF_DOMAIN, + CONF_ENTITIES, + CONF_NAME, + CONF_RESOURCE, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import device_registry as dr, entity_registry as er +import homeassistant.helpers.config_validation as cv + +from .const import ( + ADD_ENTITIES_CALLBACKS, + CONF_DOMAIN_DATA, + CONF_HARDWARE_SERIAL, + CONF_HARDWARE_TYPE, + CONF_SOFTWARE_SERIAL, + CONNECTION, + DOMAIN, +) +from .helpers import ( + DeviceConnectionType, + 
async_update_device_config, + generate_unique_id, + get_device_config, + get_device_connection, + get_resource, + purge_device_registry, + purge_entity_registry, + register_lcn_address_devices, +) +from .schemas import ( + ADDRESS_SCHEMA, + DOMAIN_DATA_BINARY_SENSOR, + DOMAIN_DATA_CLIMATE, + DOMAIN_DATA_COVER, + DOMAIN_DATA_LIGHT, + DOMAIN_DATA_SCENE, + DOMAIN_DATA_SENSOR, + DOMAIN_DATA_SWITCH, +) + +if TYPE_CHECKING: + from homeassistant.components.websocket_api import ActiveConnection + +type AsyncLcnWebSocketCommandHandler = Callable[ + [HomeAssistant, ActiveConnection, dict[str, Any], ConfigEntry], Awaitable[None] +] + +URL_BASE: Final = "/lcn_static" + + +async def register_panel_and_ws_api(hass: HomeAssistant) -> None: + """Register the LCN Panel and Websocket API.""" + websocket_api.async_register_command(hass, websocket_get_device_configs) + websocket_api.async_register_command(hass, websocket_get_entity_configs) + websocket_api.async_register_command(hass, websocket_scan_devices) + websocket_api.async_register_command(hass, websocket_add_device) + websocket_api.async_register_command(hass, websocket_delete_device) + websocket_api.async_register_command(hass, websocket_add_entity) + websocket_api.async_register_command(hass, websocket_delete_entity) + + if DOMAIN not in hass.data.get("frontend_panels", {}): + await hass.http.async_register_static_paths( + [ + StaticPathConfig( + URL_BASE, + path=lcn_panel.locate_dir(), + cache_headers=lcn_panel.is_prod_build, + ) + ] + ) + await panel_custom.async_register_panel( + hass=hass, + frontend_url_path=DOMAIN, + webcomponent_name=lcn_panel.webcomponent_name, + config_panel_domain=DOMAIN, + module_url=f"{URL_BASE}/{lcn_panel.entrypoint_js}", + embed_iframe=True, + require_admin=True, + ) + + +def get_config_entry( + func: AsyncLcnWebSocketCommandHandler, +) -> AsyncWebSocketCommandHandler: + """Websocket decorator to ensure the config_entry exists and return it.""" + + @callback + @wraps(func) + async def get_entry( + hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict + ) -> None: + """Get config_entry.""" + if not (config_entry := hass.config_entries.async_get_entry(msg["entry_id"])): + connection.send_result(msg["id"], False) + else: + await func(hass, connection, msg, config_entry) + + return get_entry + + +@websocket_api.require_admin +@websocket_api.websocket_command( + {vol.Required("type"): "lcn/devices", vol.Required("entry_id"): cv.string} +) +@websocket_api.async_response +@get_config_entry +async def websocket_get_device_configs( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Get device configs.""" + connection.send_result(msg["id"], config_entry.data[CONF_DEVICES]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/entities", + vol.Required("entry_id"): cv.string, + vol.Optional(CONF_ADDRESS): ADDRESS_SCHEMA, + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_get_entity_configs( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Get entities configs.""" + if CONF_ADDRESS in msg: + entity_configs = [ + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if tuple(entity_config[CONF_ADDRESS]) == msg[CONF_ADDRESS] + ] + else: + entity_configs = config_entry.data[CONF_ENTITIES] + + connection.send_result(msg["id"], entity_configs) + + 
+@websocket_api.require_admin +@websocket_api.websocket_command( + {vol.Required("type"): "lcn/devices/scan", vol.Required("entry_id"): cv.string} +) +@websocket_api.async_response +@get_config_entry +async def websocket_scan_devices( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Scan for new devices.""" + host_connection = hass.data[DOMAIN][config_entry.entry_id][CONNECTION] + await host_connection.scan_modules() + + for device_connection in host_connection.address_conns.values(): + if not device_connection.is_group: + await async_create_or_update_device_in_config_entry( + hass, device_connection, config_entry + ) + + # create/update devices in device registry + register_lcn_address_devices(hass, config_entry) + + connection.send_result(msg["id"], config_entry.data[CONF_DEVICES]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/devices/add", + vol.Required("entry_id"): cv.string, + vol.Required(CONF_ADDRESS): ADDRESS_SCHEMA, + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_add_device( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Add a device.""" + if get_device_config(msg[CONF_ADDRESS], config_entry): + connection.send_result( + msg["id"], False + ) # device_config already in config_entry + return + + device_config = { + CONF_ADDRESS: msg[CONF_ADDRESS], + CONF_NAME: "", + CONF_HARDWARE_SERIAL: -1, + CONF_SOFTWARE_SERIAL: -1, + CONF_HARDWARE_TYPE: -1, + } + + # update device info from LCN + device_connection = get_device_connection(hass, msg[CONF_ADDRESS], config_entry) + await async_update_device_config(device_connection, device_config) + + # add device_config to config_entry + device_configs = [*config_entry.data[CONF_DEVICES], device_config] + data = {**config_entry.data, CONF_DEVICES: device_configs} + hass.config_entries.async_update_entry(config_entry, data=data) + + # create/update devices in device registry + register_lcn_address_devices(hass, config_entry) + + connection.send_result(msg["id"], True) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/devices/delete", + vol.Required("entry_id"): cv.string, + vol.Required(CONF_ADDRESS): ADDRESS_SCHEMA, + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_delete_device( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Delete a device.""" + device_config = get_device_config(msg[CONF_ADDRESS], config_entry) + + device_registry = dr.async_get(hass) + identifiers = { + (DOMAIN, generate_unique_id(config_entry.entry_id, msg[CONF_ADDRESS])) + } + device = device_registry.async_get_device(identifiers, set()) + + if not (device and device_config): + connection.send_result(msg["id"], False) + return + + # remove module/group device from config_entry data + device_configs = [ + dc for dc in config_entry.data[CONF_DEVICES] if dc != device_config + ] + data = {**config_entry.data, CONF_DEVICES: device_configs} + hass.config_entries.async_update_entry(config_entry, data=data) + + # remove all child devices (and entities) from config_entry data + for entity_config in data[CONF_ENTITIES][:]: + if tuple(entity_config[CONF_ADDRESS]) == msg[CONF_ADDRESS]: + data[CONF_ENTITIES].remove(entity_config) + + hass.config_entries.async_update_entry(config_entry, 
data=data) + + # cleanup registries + purge_entity_registry(hass, config_entry.entry_id, data) + purge_device_registry(hass, config_entry.entry_id, data) + + # return the device config, not all devices !!! + connection.send_result(msg["id"]) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/entities/add", + vol.Required("entry_id"): cv.string, + vol.Required(CONF_ADDRESS): ADDRESS_SCHEMA, + vol.Required(CONF_NAME): cv.string, + vol.Required(CONF_DOMAIN): cv.string, + vol.Required(CONF_DOMAIN_DATA): vol.Any( + DOMAIN_DATA_BINARY_SENSOR, + DOMAIN_DATA_SENSOR, + DOMAIN_DATA_SWITCH, + DOMAIN_DATA_LIGHT, + DOMAIN_DATA_CLIMATE, + DOMAIN_DATA_COVER, + DOMAIN_DATA_SCENE, + ), + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_add_entity( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Add an entity.""" + if not (device_config := get_device_config(msg[CONF_ADDRESS], config_entry)): + connection.send_result(msg["id"], False) + return + + domain_name = msg[CONF_DOMAIN] + domain_data = msg[CONF_DOMAIN_DATA] + resource = get_resource(domain_name, domain_data).lower() + unique_id = generate_unique_id( + config_entry.entry_id, + device_config[CONF_ADDRESS], + resource, + ) + + entity_registry = er.async_get(hass) + if entity_registry.async_get_entity_id(msg[CONF_DOMAIN], DOMAIN, unique_id): + connection.send_result(msg["id"], False) + return + + entity_config = { + CONF_ADDRESS: msg[CONF_ADDRESS], + CONF_NAME: msg[CONF_NAME], + CONF_RESOURCE: resource, + CONF_DOMAIN: domain_name, + CONF_DOMAIN_DATA: domain_data, + } + + # Create new entity and add to corresponding component + add_entities = hass.data[DOMAIN][msg["entry_id"]][ADD_ENTITIES_CALLBACKS][ + msg[CONF_DOMAIN] + ] + add_entities([entity_config]) + + # Add entity config to config_entry + entity_configs = [*config_entry.data[CONF_ENTITIES], entity_config] + data = {**config_entry.data, CONF_ENTITIES: entity_configs} + + # schedule config_entry for save + hass.config_entries.async_update_entry(config_entry, data=data) + + connection.send_result(msg["id"], True) + + +@websocket_api.require_admin +@websocket_api.websocket_command( + { + vol.Required("type"): "lcn/entities/delete", + vol.Required("entry_id"): cv.string, + vol.Required(CONF_ADDRESS): ADDRESS_SCHEMA, + vol.Required(CONF_DOMAIN): cv.string, + vol.Required(CONF_RESOURCE): cv.string, + } +) +@websocket_api.async_response +@get_config_entry +async def websocket_delete_entity( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict, + config_entry: ConfigEntry, +) -> None: + """Delete an entity.""" + entity_config = next( + ( + entity_config + for entity_config in config_entry.data[CONF_ENTITIES] + if ( + tuple(entity_config[CONF_ADDRESS]) == msg[CONF_ADDRESS] + and entity_config[CONF_DOMAIN] == msg[CONF_DOMAIN] + and entity_config[CONF_RESOURCE] == msg[CONF_RESOURCE] + ) + ), + None, + ) + + if entity_config is None: + connection.send_result(msg["id"], False) + return + + entity_configs = [ + ec for ec in config_entry.data[CONF_ENTITIES] if ec != entity_config + ] + data = {**config_entry.data, CONF_ENTITIES: entity_configs} + + hass.config_entries.async_update_entry(config_entry, data=data) + + # cleanup registries + purge_entity_registry(hass, config_entry.entry_id, data) + purge_device_registry(hass, config_entry.entry_id, data) + + connection.send_result(msg["id"]) + + +async def 
async_create_or_update_device_in_config_entry( + hass: HomeAssistant, + device_connection: DeviceConnectionType, + config_entry: ConfigEntry, +) -> None: + """Create or update device in config_entry according to given device_connection.""" + address = ( + device_connection.seg_id, + device_connection.addr_id, + device_connection.is_group, + ) + + device_configs = [*config_entry.data[CONF_DEVICES]] + data = {**config_entry.data, CONF_DEVICES: device_configs} + for device_config in data[CONF_DEVICES]: + if tuple(device_config[CONF_ADDRESS]) == address: + break # device already in config_entry + else: + # create new device_entry + device_config = { + CONF_ADDRESS: address, + CONF_NAME: "", + CONF_HARDWARE_SERIAL: -1, + CONF_SOFTWARE_SERIAL: -1, + CONF_HARDWARE_TYPE: -1, + } + data[CONF_DEVICES].append(device_config) + + # update device_entry + await async_update_device_config(device_connection, device_config) + + hass.config_entries.async_update_entry(config_entry, data=data) diff --git a/homeassistant/components/ld2410_ble/manifest.json b/homeassistant/components/ld2410_ble/manifest.json index a1b0e9a1398..d3e21eeae90 100644 --- a/homeassistant/components/ld2410_ble/manifest.json +++ b/homeassistant/components/ld2410_ble/manifest.json @@ -20,5 +20,5 @@ "documentation": "https://www.home-assistant.io/integrations/ld2410_ble", "integration_type": "device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.19.4", "ld2410-ble==0.1.1"] + "requirements": ["bluetooth-data-tools==1.20.0", "ld2410-ble==0.1.1"] } diff --git a/homeassistant/components/led_ble/manifest.json b/homeassistant/components/led_ble/manifest.json index e22d23fb971..1d12e355a0d 100644 --- a/homeassistant/components/led_ble/manifest.json +++ b/homeassistant/components/led_ble/manifest.json @@ -35,5 +35,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/led_ble", "iot_class": "local_polling", - "requirements": ["bluetooth-data-tools==1.19.4", "led-ble==1.0.2"] + "requirements": ["bluetooth-data-tools==1.20.0", "led-ble==1.0.2"] } diff --git a/homeassistant/components/lg_netcast/config_flow.py b/homeassistant/components/lg_netcast/config_flow.py index c4e6c75edea..4b1780d41ae 100644 --- a/homeassistant/components/lg_netcast/config_flow.py +++ b/homeassistant/components/lg_netcast/config_flow.py @@ -68,11 +68,11 @@ class LGNetCast(config_entries.ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import configuration from yaml.""" self.device_config = { - CONF_HOST: config[CONF_HOST], - CONF_NAME: config[CONF_NAME], + CONF_HOST: import_data[CONF_HOST], + CONF_NAME: import_data[CONF_NAME], } def _create_issue(): @@ -92,7 +92,7 @@ class LGNetCast(config_entries.ConfigFlow, domain=DOMAIN): ) try: - result: ConfigFlowResult = await self.async_step_authorize(config) + result: ConfigFlowResult = await self.async_step_authorize(import_data) except AbortFlow as err: if err.reason != "already_configured": async_create_issue( diff --git a/homeassistant/components/lidarr/config_flow.py b/homeassistant/components/lidarr/config_flow.py index 05d6900bb41..bc7a40c976e 100644 --- a/homeassistant/components/lidarr/config_flow.py +++ b/homeassistant/components/lidarr/config_flow.py @@ -29,7 +29,7 @@ class LidarrConfigFlow(ConfigFlow, domain=DOMAIN): self.entry: LidarrConfigEntry | None = None async def 
async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) diff --git a/homeassistant/components/lifx/icons.json b/homeassistant/components/lifx/icons.json index e32fdb5e06b..58a7c89e266 100644 --- a/homeassistant/components/lifx/icons.json +++ b/homeassistant/components/lifx/icons.json @@ -1,13 +1,31 @@ { "services": { - "set_hev_cycle_state": "mdi:led-on", - "set_state": "mdi:led-on", - "effect_pulse": "mdi:pulse", - "effect_colorloop": "mdi:looks", - "effect_move": "mdi:cube-send", - "effect_flame": "mdi:fire", - "effect_morph": "mdi:shape-outline", - "effect_sky": "mdi:clouds", - "effect_stop": "mdi:stop" + "set_hev_cycle_state": { + "service": "mdi:led-on" + }, + "set_state": { + "service": "mdi:led-on" + }, + "effect_pulse": { + "service": "mdi:pulse" + }, + "effect_colorloop": { + "service": "mdi:looks" + }, + "effect_move": { + "service": "mdi:cube-send" + }, + "effect_flame": { + "service": "mdi:fire" + }, + "effect_morph": { + "service": "mdi:shape-outline" + }, + "effect_sky": { + "service": "mdi:clouds" + }, + "effect_stop": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/lifx/manifest.json b/homeassistant/components/lifx/manifest.json index 54cff7d6e1f..3ef70f16467 100644 --- a/homeassistant/components/lifx/manifest.json +++ b/homeassistant/components/lifx/manifest.json @@ -48,7 +48,7 @@ "iot_class": "local_polling", "loggers": ["aiolifx", "aiolifx_effects", "bitstring"], "requirements": [ - "aiolifx==1.0.6", + "aiolifx==1.0.9", "aiolifx-effects==0.3.2", "aiolifx-themes==0.5.0" ] diff --git a/homeassistant/components/light/icons.json b/homeassistant/components/light/icons.json index 5113834e575..df98def090e 100644 --- a/homeassistant/components/light/icons.json +++ b/homeassistant/components/light/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "toggle": "mdi:lightbulb", - "turn_off": "mdi:lightbulb-off", - "turn_on": "mdi:lightbulb-on" + "toggle": { + "service": "mdi:lightbulb" + }, + "turn_off": { + "service": "mdi:lightbulb-off" + }, + "turn_on": { + "service": "mdi:lightbulb-on" + } } } diff --git a/homeassistant/components/linkplay/__init__.py b/homeassistant/components/linkplay/__init__.py index c0fe711a61b..808f2f93ce2 100644 --- a/homeassistant/components/linkplay/__init__.py +++ b/homeassistant/components/linkplay/__init__.py @@ -1,17 +1,22 @@ """Support for LinkPlay devices.""" +from dataclasses import dataclass + +from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge -from linkplay.discovery import linkplay_factory_bridge +from linkplay.discovery import linkplay_factory_httpapi_bridge +from linkplay.exceptions import LinkPlayRequestException from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import PLATFORMS +from .utils import async_get_client_session +@dataclass class LinkPlayData: """Data for LinkPlay.""" @@ -24,16 +29,17 @@ type LinkPlayConfigEntry = ConfigEntry[LinkPlayData] async def async_setup_entry(hass: HomeAssistant, entry: LinkPlayConfigEntry) -> bool: """Async setup hass config entry. 
Called when an entry has been setup.""" - session = async_get_clientsession(hass) - if ( - bridge := await linkplay_factory_bridge(entry.data[CONF_HOST], session) - ) is None: + session: ClientSession = await async_get_client_session(hass) + bridge: LinkPlayBridge | None = None + + try: + bridge = await linkplay_factory_httpapi_bridge(entry.data[CONF_HOST], session) + except LinkPlayRequestException as exception: raise ConfigEntryNotReady( f"Failed to connect to LinkPlay device at {entry.data[CONF_HOST]}" - ) + ) from exception - entry.runtime_data = LinkPlayData() - entry.runtime_data.bridge = bridge + entry.runtime_data = LinkPlayData(bridge=bridge) await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) return True diff --git a/homeassistant/components/linkplay/config_flow.py b/homeassistant/components/linkplay/config_flow.py index 0f9c40d0fd4..7dfdce238ff 100644 --- a/homeassistant/components/linkplay/config_flow.py +++ b/homeassistant/components/linkplay/config_flow.py @@ -1,16 +1,22 @@ """Config flow to configure LinkPlay component.""" +import logging from typing import Any -from linkplay.discovery import linkplay_factory_bridge +from aiohttp import ClientSession +from linkplay.bridge import LinkPlayBridge +from linkplay.discovery import linkplay_factory_httpapi_bridge +from linkplay.exceptions import LinkPlayRequestException import voluptuous as vol from homeassistant.components import zeroconf from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_MODEL -from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import DOMAIN +from .utils import async_get_client_session + +_LOGGER = logging.getLogger(__name__) class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): @@ -25,10 +31,15 @@ class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle Zeroconf discovery.""" - session = async_get_clientsession(self.hass) - bridge = await linkplay_factory_bridge(discovery_info.host, session) + session: ClientSession = await async_get_client_session(self.hass) + bridge: LinkPlayBridge | None = None - if bridge is None: + try: + bridge = await linkplay_factory_httpapi_bridge(discovery_info.host, session) + except LinkPlayRequestException: + _LOGGER.exception( + "Failed to connect to LinkPlay device at %s", discovery_info.host + ) return self.async_abort(reason="cannot_connect") self.data[CONF_HOST] = discovery_info.host @@ -66,14 +77,26 @@ class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors: dict[str, str] = {} if user_input: - session = async_get_clientsession(self.hass) - bridge = await linkplay_factory_bridge(user_input[CONF_HOST], session) + session: ClientSession = await async_get_client_session(self.hass) + bridge: LinkPlayBridge | None = None + + try: + bridge = await linkplay_factory_httpapi_bridge( + user_input[CONF_HOST], session + ) + except LinkPlayRequestException: + _LOGGER.exception( + "Failed to connect to LinkPlay device at %s", user_input[CONF_HOST] + ) + errors["base"] = "cannot_connect" if bridge is not None: self.data[CONF_HOST] = user_input[CONF_HOST] self.data[CONF_MODEL] = bridge.device.name - await self.async_set_unique_id(bridge.device.uuid) + await self.async_set_unique_id( + bridge.device.uuid, raise_on_progress=False + ) self._abort_if_unique_id_configured( updates={CONF_HOST: self.data[CONF_HOST]} ) @@ -83,7 +106,6 @@ class LinkPlayConfigFlow(ConfigFlow, domain=DOMAIN): data={CONF_HOST: 
self.data[CONF_HOST]}, ) - errors["base"] = "cannot_connect" return self.async_show_form( step_id="user", data_schema=vol.Schema({vol.Required(CONF_HOST): str}), diff --git a/homeassistant/components/linkplay/const.py b/homeassistant/components/linkplay/const.py index 48ae225dd98..91a427d5eb8 100644 --- a/homeassistant/components/linkplay/const.py +++ b/homeassistant/components/linkplay/const.py @@ -4,3 +4,4 @@ from homeassistant.const import Platform DOMAIN = "linkplay" PLATFORMS = [Platform.MEDIA_PLAYER] +CONF_SESSION = "session" diff --git a/homeassistant/components/linkplay/manifest.json b/homeassistant/components/linkplay/manifest.json index 9ac2a9e66e6..66a719c640e 100644 --- a/homeassistant/components/linkplay/manifest.json +++ b/homeassistant/components/linkplay/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/linkplay", "integration_type": "hub", "iot_class": "local_polling", - "requirements": ["python-linkplay==0.0.6"], + "requirements": ["python-linkplay==0.0.9"], "zeroconf": ["_linkplay._tcp.local."] } diff --git a/homeassistant/components/linkplay/media_player.py b/homeassistant/components/linkplay/media_player.py index 398add235bd..0b62b4dbcee 100644 --- a/homeassistant/components/linkplay/media_player.py +++ b/homeassistant/components/linkplay/media_player.py @@ -48,6 +48,17 @@ SOURCE_MAP: dict[PlayingMode, str] = { PlayingMode.XLR: "XLR", PlayingMode.HDMI: "HDMI", PlayingMode.OPTICAL_2: "Optical 2", + PlayingMode.EXTERN_BLUETOOTH: "External Bluetooth", + PlayingMode.PHONO: "Phono", + PlayingMode.ARC: "ARC", + PlayingMode.COAXIAL_2: "Coaxial 2", + PlayingMode.TF_CARD_1: "SD Card 1", + PlayingMode.TF_CARD_2: "SD Card 2", + PlayingMode.CD: "CD", + PlayingMode.DAB: "DAB Radio", + PlayingMode.FM: "FM Radio", + PlayingMode.RCA: "RCA", + PlayingMode.UDISK: "USB", } SOURCE_MAP_INV: dict[str, PlayingMode] = {v: k for k, v in SOURCE_MAP.items()} diff --git a/homeassistant/components/linkplay/utils.py b/homeassistant/components/linkplay/utils.py index 7532c9b354a..7f15e297145 100644 --- a/homeassistant/components/linkplay/utils.py +++ b/homeassistant/components/linkplay/utils.py @@ -2,6 +2,14 @@ from typing import Final +from aiohttp import ClientSession +from linkplay.utils import async_create_unverified_client_session + +from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE +from homeassistant.core import Event, HomeAssistant, callback + +from .const import CONF_SESSION, DOMAIN + MANUFACTURER_ARTSOUND: Final[str] = "ArtSound" MANUFACTURER_ARYLIC: Final[str] = "Arylic" MANUFACTURER_IEAST: Final[str] = "iEAST" @@ -44,3 +52,22 @@ def get_info_from_project(project: str) -> tuple[str, str]: return MANUFACTURER_IEAST, MODELS_IEAST_AUDIOCAST_M5 case _: return MANUFACTURER_GENERIC, MODELS_GENERIC + + +async def async_get_client_session(hass: HomeAssistant) -> ClientSession: + """Get a ClientSession that can be used with LinkPlay devices.""" + hass.data.setdefault(DOMAIN, {}) + if CONF_SESSION not in hass.data[DOMAIN]: + clientsession: ClientSession = await async_create_unverified_client_session() + + @callback + def _async_close_websession(event: Event) -> None: + """Close websession.""" + clientsession.detach() + + hass.bus.async_listen_once(EVENT_HOMEASSISTANT_CLOSE, _async_close_websession) + hass.data[DOMAIN][CONF_SESSION] = clientsession + return clientsession + + session: ClientSession = hass.data[DOMAIN][CONF_SESSION] + return session diff --git a/homeassistant/components/litterrobot/icons.json 
b/homeassistant/components/litterrobot/icons.json index 333f309e9e8..482031f8424 100644 --- a/homeassistant/components/litterrobot/icons.json +++ b/homeassistant/components/litterrobot/icons.json @@ -40,6 +40,8 @@ } }, "services": { - "set_sleep_mode": "mdi:sleep" + "set_sleep_mode": { + "service": "mdi:sleep" + } } } diff --git a/homeassistant/components/local_file/icons.json b/homeassistant/components/local_file/icons.json index c9c92fa86c8..7b0067c6a44 100644 --- a/homeassistant/components/local_file/icons.json +++ b/homeassistant/components/local_file/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_file_path": "mdi:cog" + "update_file_path": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/local_ip/__init__.py b/homeassistant/components/local_ip/__init__.py index 45ddbed7150..72f5d4f7a43 100644 --- a/homeassistant/components/local_ip/__init__.py +++ b/homeassistant/components/local_ip/__init__.py @@ -2,11 +2,8 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv -from .const import DOMAIN, PLATFORMS - -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +from .const import PLATFORMS async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/lock/icons.json b/homeassistant/components/lock/icons.json index 009bd84a372..0b1befde9ff 100644 --- a/homeassistant/components/lock/icons.json +++ b/homeassistant/components/lock/icons.json @@ -13,8 +13,14 @@ } }, "services": { - "lock": "mdi:lock", - "open": "mdi:door-open", - "unlock": "mdi:lock-open-variant" + "lock": { + "service": "mdi:lock" + }, + "open": { + "service": "mdi:door-open" + }, + "unlock": { + "service": "mdi:lock-open-variant" + } } } diff --git a/homeassistant/components/logbook/__init__.py b/homeassistant/components/logbook/__init__.py index d520cafb80e..239a52ff7a1 100644 --- a/homeassistant/components/logbook/__init__.py +++ b/homeassistant/components/logbook/__init__.py @@ -112,7 +112,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: # away so we use the "logbook" domain domain = DOMAIN - message.hass = hass message = message.async_render(parse_result=False) async_log_entry(hass, name, message, domain, entity_id, service.context) diff --git a/homeassistant/components/logbook/icons.json b/homeassistant/components/logbook/icons.json index cd2cde8600c..a8af6427b8c 100644 --- a/homeassistant/components/logbook/icons.json +++ b/homeassistant/components/logbook/icons.json @@ -1,5 +1,7 @@ { "services": { - "log": "mdi:file-document" + "log": { + "service": "mdi:file-document" + } } } diff --git a/homeassistant/components/logbook/models.py b/homeassistant/components/logbook/models.py index 2f9b2c8e289..8fd850b26fb 100644 --- a/homeassistant/components/logbook/models.py +++ b/homeassistant/components/logbook/models.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable, Mapping from dataclasses import dataclass from functools import cached_property -from typing import TYPE_CHECKING, Any, cast +from typing import TYPE_CHECKING, Any, Final, NamedTuple, cast from sqlalchemy.engine.row import Row @@ -45,76 +45,96 @@ class LazyEventPartialState: ) -> None: """Init the lazy event.""" self.row = row - self._event_data: dict[str, Any] | None = None - self._event_data_cache = event_data_cache # We need to explicitly check for the row is EventAsRow as the unhappy path - # to fetch row.data 
for Row is very expensive + # to fetch row[DATA_POS] for Row is very expensive if type(row) is EventAsRow: # If its an EventAsRow we can avoid the whole # json decode process as we already have the data - self.data = row.data + self.data = row[DATA_POS] return if TYPE_CHECKING: - source = cast(str, row.event_data) + source = cast(str, row[EVENT_DATA_POS]) else: - source = row.event_data + source = row[EVENT_DATA_POS] if not source: self.data = {} - elif event_data := self._event_data_cache.get(source): + elif event_data := event_data_cache.get(source): self.data = event_data else: - self.data = self._event_data_cache[source] = cast( + self.data = event_data_cache[source] = cast( dict[str, Any], json_loads(source) ) @cached_property def event_type(self) -> EventType[Any] | str | None: """Return the event type.""" - return self.row.event_type + return self.row[EVENT_TYPE_POS] @cached_property def entity_id(self) -> str | None: """Return the entity id.""" - return self.row.entity_id + return self.row[ENTITY_ID_POS] @cached_property def state(self) -> str | None: """Return the state.""" - return self.row.state + return self.row[STATE_POS] @cached_property def context_id(self) -> str | None: """Return the context id.""" - return bytes_to_ulid_or_none(self.row.context_id_bin) + return bytes_to_ulid_or_none(self.row[CONTEXT_ID_BIN_POS]) @cached_property def context_user_id(self) -> str | None: """Return the context user id.""" - return bytes_to_uuid_hex_or_none(self.row.context_user_id_bin) + return bytes_to_uuid_hex_or_none(self.row[CONTEXT_USER_ID_BIN_POS]) @cached_property def context_parent_id(self) -> str | None: """Return the context parent id.""" - return bytes_to_ulid_or_none(self.row.context_parent_id_bin) + return bytes_to_ulid_or_none(self.row[CONTEXT_PARENT_ID_BIN_POS]) -@dataclass(slots=True, frozen=True) -class EventAsRow: - """Convert an event to a row.""" +# Row order must match the query order in queries/common.py +# --------------------------------------------------------- +ROW_ID_POS: Final = 0 +EVENT_TYPE_POS: Final = 1 +EVENT_DATA_POS: Final = 2 +TIME_FIRED_TS_POS: Final = 3 +CONTEXT_ID_BIN_POS: Final = 4 +CONTEXT_USER_ID_BIN_POS: Final = 5 +CONTEXT_PARENT_ID_BIN_POS: Final = 6 +STATE_POS: Final = 7 +ENTITY_ID_POS: Final = 8 +ICON_POS: Final = 9 +CONTEXT_ONLY_POS: Final = 10 +# - For EventAsRow, additional fields are: +DATA_POS: Final = 11 +CONTEXT_POS: Final = 12 + +class EventAsRow(NamedTuple): + """Convert an event to a row. 
+ + This much always match the order of the columns in queries/common.py + """ + + row_id: int + event_type: EventType[Any] | str | None + event_data: str | None + time_fired_ts: float + context_id_bin: bytes + context_user_id_bin: bytes | None + context_parent_id_bin: bytes | None + state: str | None + entity_id: str | None + icon: str | None + context_only: bool | None + + # Additional fields for EventAsRow data: Mapping[str, Any] context: Context - context_id_bin: bytes - time_fired_ts: float - row_id: int - event_data: str | None = None - entity_id: str | None = None - icon: str | None = None - context_user_id_bin: bytes | None = None - context_parent_id_bin: bytes | None = None - event_type: EventType[Any] | str | None = None - state: str | None = None - context_only: None = None @callback @@ -123,14 +143,19 @@ def async_event_to_row(event: Event) -> EventAsRow: if event.event_type != EVENT_STATE_CHANGED: context = event.context return EventAsRow( - data=event.data, - context=event.context, + row_id=hash(event), event_type=event.event_type, + event_data=None, + time_fired_ts=event.time_fired_timestamp, context_id_bin=ulid_to_bytes(context.id), context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), - time_fired_ts=event.time_fired_timestamp, - row_id=hash(event), + state=None, + entity_id=None, + icon=None, + context_only=None, + data=event.data, + context=context, ) # States are prefiltered so we never get states # that are missing new_state or old_state @@ -138,14 +163,17 @@ def async_event_to_row(event: Event) -> EventAsRow: new_state: State = event.data["new_state"] context = new_state.context return EventAsRow( - data=event.data, - context=event.context, - entity_id=new_state.entity_id, - state=new_state.state, + row_id=hash(event), + event_type=None, + event_data=None, + time_fired_ts=new_state.last_updated_timestamp, context_id_bin=ulid_to_bytes(context.id), context_user_id_bin=uuid_hex_to_bytes_or_none(context.user_id), context_parent_id_bin=ulid_to_bytes_or_none(context.parent_id), - time_fired_ts=new_state.last_updated_timestamp, - row_id=hash(event), + state=new_state.state, + entity_id=new_state.entity_id, icon=new_state.attributes.get(ATTR_ICON), + context_only=None, + data=event.data, + context=context, ) diff --git a/homeassistant/components/logbook/processor.py b/homeassistant/components/logbook/processor.py index ed9888f83d0..77aa71740f1 100644 --- a/homeassistant/components/logbook/processor.py +++ b/homeassistant/components/logbook/processor.py @@ -3,11 +3,11 @@ from __future__ import annotations from collections.abc import Callable, Generator, Sequence -from contextlib import suppress from dataclasses import dataclass from datetime import datetime as dt import logging -from typing import Any +import time +from typing import TYPE_CHECKING, Any from sqlalchemy.engine import Result from sqlalchemy.engine.row import Row @@ -18,7 +18,6 @@ from homeassistant.components.recorder.models import ( bytes_to_uuid_hex_or_none, extract_event_type_ids, extract_metadata_ids, - process_datetime_to_timestamp, process_timestamp_to_utc_isoformat, ) from homeassistant.components.recorder.util import ( @@ -63,7 +62,23 @@ from .const import ( LOGBOOK_ENTRY_WHEN, ) from .helpers import is_sensor_continuous -from .models import EventAsRow, LazyEventPartialState, LogbookConfig, async_event_to_row +from .models import ( + CONTEXT_ID_BIN_POS, + CONTEXT_ONLY_POS, + CONTEXT_PARENT_ID_BIN_POS, + CONTEXT_POS, + 
CONTEXT_USER_ID_BIN_POS, + ENTITY_ID_POS, + EVENT_TYPE_POS, + ICON_POS, + ROW_ID_POS, + STATE_POS, + TIME_FIRED_TS_POS, + EventAsRow, + LazyEventPartialState, + LogbookConfig, + async_event_to_row, +) from .queries import statement_for_request from .queries.common import PSEUDO_EVENT_STATE_CHANGED @@ -82,7 +97,7 @@ class LogbookRun: event_cache: EventCache entity_name_cache: EntityNameCache include_entity_name: bool - format_time: Callable[[Row | EventAsRow], Any] + timestamp: bool memoize_new_contexts: bool = True @@ -111,16 +126,13 @@ class EventProcessor: self.context_id = context_id logbook_config: LogbookConfig = hass.data[DOMAIN] self.filters: Filters | None = logbook_config.sqlalchemy_filter - format_time = ( - _row_time_fired_timestamp if timestamp else _row_time_fired_isoformat - ) self.logbook_run = LogbookRun( context_lookup={None: None}, external_events=logbook_config.external_events, event_cache=EventCache({}), entity_name_cache=EntityNameCache(self.hass), include_entity_name=include_entity_name, - format_time=format_time, + timestamp=timestamp, ) self.context_augmenter = ContextAugmenter(self.logbook_run) @@ -199,26 +211,30 @@ def _humanify( continuous_sensors: dict[str, bool] = {} context_lookup = logbook_run.context_lookup external_events = logbook_run.external_events - event_cache = logbook_run.event_cache - entity_name_cache = logbook_run.entity_name_cache + event_cache_get = logbook_run.event_cache.get + entity_name_cache_get = logbook_run.entity_name_cache.get include_entity_name = logbook_run.include_entity_name - format_time = logbook_run.format_time + timestamp = logbook_run.timestamp memoize_new_contexts = logbook_run.memoize_new_contexts + get_context = context_augmenter.get_context + context_id_bin: bytes + data: dict[str, Any] # Process rows for row in rows: - context_id_bin: bytes = row.context_id_bin + context_id_bin = row[CONTEXT_ID_BIN_POS] if memoize_new_contexts and context_id_bin not in context_lookup: context_lookup[context_id_bin] = row - if row.context_only: + if row[CONTEXT_ONLY_POS]: continue - event_type = row.event_type - + event_type = row[EVENT_TYPE_POS] if event_type == EVENT_CALL_SERVICE: continue + if event_type is PSEUDO_EVENT_STATE_CHANGED: - entity_id = row.entity_id - assert entity_id is not None + entity_id = row[ENTITY_ID_POS] + if TYPE_CHECKING: + assert entity_id is not None # Skip continuous sensors if ( is_continuous := continuous_sensors.get(entity_id) @@ -229,50 +245,69 @@ def _humanify( continue data = { - LOGBOOK_ENTRY_WHEN: format_time(row), - LOGBOOK_ENTRY_STATE: row.state, + LOGBOOK_ENTRY_STATE: row[STATE_POS], LOGBOOK_ENTRY_ENTITY_ID: entity_id, } if include_entity_name: - data[LOGBOOK_ENTRY_NAME] = entity_name_cache.get(entity_id) - if icon := row.icon: + data[LOGBOOK_ENTRY_NAME] = entity_name_cache_get(entity_id) + if icon := row[ICON_POS]: data[LOGBOOK_ENTRY_ICON] = icon - context_augmenter.augment(data, row, context_id_bin) - yield data - elif event_type in external_events: domain, describe_event = external_events[event_type] try: - data = describe_event(event_cache.get(row)) + data = describe_event(event_cache_get(row)) except Exception: _LOGGER.exception( "Error with %s describe event for %s", domain, event_type ) continue - data[LOGBOOK_ENTRY_WHEN] = format_time(row) data[LOGBOOK_ENTRY_DOMAIN] = domain - context_augmenter.augment(data, row, context_id_bin) - yield data elif event_type == EVENT_LOGBOOK_ENTRY: - event = event_cache.get(row) + event = event_cache_get(row) if not (event_data := event.data): continue 
entry_domain = event_data.get(ATTR_DOMAIN) entry_entity_id = event_data.get(ATTR_ENTITY_ID) if entry_domain is None and entry_entity_id is not None: - with suppress(IndexError): - entry_domain = split_entity_id(str(entry_entity_id))[0] + entry_domain = split_entity_id(str(entry_entity_id))[0] data = { - LOGBOOK_ENTRY_WHEN: format_time(row), LOGBOOK_ENTRY_NAME: event_data.get(ATTR_NAME), LOGBOOK_ENTRY_MESSAGE: event_data.get(ATTR_MESSAGE), LOGBOOK_ENTRY_DOMAIN: entry_domain, LOGBOOK_ENTRY_ENTITY_ID: entry_entity_id, } - context_augmenter.augment(data, row, context_id_bin) - yield data + + else: + continue + + time_fired_ts = row[TIME_FIRED_TS_POS] + if timestamp: + when = time_fired_ts or time.time() + else: + when = process_timestamp_to_utc_isoformat( + dt_util.utc_from_timestamp(time_fired_ts) or dt_util.utcnow() + ) + data[LOGBOOK_ENTRY_WHEN] = when + + if context_user_id_bin := row[CONTEXT_USER_ID_BIN_POS]: + data[CONTEXT_USER_ID] = bytes_to_uuid_hex_or_none(context_user_id_bin) + + # Augment context if its available but not if the context is the same as the row + # or if the context is the parent of the row + if (context_row := get_context(context_id_bin, row)) and not ( + (row is context_row or _rows_ids_match(row, context_row)) + and ( + not (context_parent := row[CONTEXT_PARENT_ID_BIN_POS]) + or not (context_row := get_context(context_parent, context_row)) + or row is context_row + or _rows_ids_match(row, context_row) + ) + ): + context_augmenter.augment(data, context_row) + + yield data class ContextAugmenter: @@ -286,52 +321,28 @@ class ContextAugmenter: self.event_cache = logbook_run.event_cache self.include_entity_name = logbook_run.include_entity_name - def _get_context_row( - self, context_id_bin: bytes | None, row: Row | EventAsRow + def get_context( + self, context_id_bin: bytes | None, row: Row | EventAsRow | None ) -> Row | EventAsRow | None: """Get the context row from the id or row context.""" if context_id_bin is not None and ( context_row := self.context_lookup.get(context_id_bin) ): return context_row - if (context := getattr(row, "context", None)) is not None and ( - origin_event := context.origin_event - ) is not None: + if ( + type(row) is EventAsRow + and (context := row[CONTEXT_POS]) is not None + and (origin_event := context.origin_event) is not None + ): return async_event_to_row(origin_event) return None - def augment( - self, data: dict[str, Any], row: Row | EventAsRow, context_id_bin: bytes | None - ) -> None: + def augment(self, data: dict[str, Any], context_row: Row | EventAsRow) -> None: """Augment data from the row and cache.""" - if context_user_id_bin := row.context_user_id_bin: - data[CONTEXT_USER_ID] = bytes_to_uuid_hex_or_none(context_user_id_bin) - - if not (context_row := self._get_context_row(context_id_bin, row)): - return - - if _rows_match(row, context_row): - # This is the first event with the given ID. Was it directly caused by - # a parent event? - context_parent_id_bin = row.context_parent_id_bin - if ( - not context_parent_id_bin - or ( - context_row := self._get_context_row( - context_parent_id_bin, context_row - ) - ) - is None - ): - return - # Ensure the (parent) context_event exists and is not the root cause of - # this log entry. 
- if _rows_match(row, context_row): - return - event_type = context_row.event_type + event_type = context_row[EVENT_TYPE_POS] # State change - if context_entity_id := context_row.entity_id: - data[CONTEXT_STATE] = context_row.state + if context_entity_id := context_row[ENTITY_ID_POS]: + data[CONTEXT_STATE] = context_row[STATE_POS] data[CONTEXT_ENTITY_ID] = context_entity_id if self.include_entity_name: data[CONTEXT_ENTITY_ID_NAME] = self.entity_name_cache.get( @@ -374,23 +385,9 @@ class ContextAugmenter: data[CONTEXT_ENTITY_ID_NAME] = self.entity_name_cache.get(attr_entity_id) -def _rows_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool: +def _rows_ids_match(row: Row | EventAsRow, other_row: Row | EventAsRow) -> bool: """Check of rows match by using the same method as Events __hash__.""" - return bool( - row is other_row or (row_id := row.row_id) and row_id == other_row.row_id - ) - - -def _row_time_fired_isoformat(row: Row | EventAsRow) -> str: - """Convert the row timed_fired to isoformat.""" - return process_timestamp_to_utc_isoformat( - dt_util.utc_from_timestamp(row.time_fired_ts) or dt_util.utcnow() - ) - - -def _row_time_fired_timestamp(row: Row | EventAsRow) -> float: - """Convert the row timed_fired to timestamp.""" - return row.time_fired_ts or process_datetime_to_timestamp(dt_util.utcnow()) + return bool((row_id := row[ROW_ID_POS]) and row_id == other_row[ROW_ID_POS]) class EntityNameCache: diff --git a/homeassistant/components/logbook/rest_api.py b/homeassistant/components/logbook/rest_api.py index bd9efe7aba3..c7ba196275b 100644 --- a/homeassistant/components/logbook/rest_api.py +++ b/homeassistant/components/logbook/rest_api.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable from datetime import timedelta from http import HTTPStatus -from typing import Any, cast +from typing import Any from aiohttp import web import voluptuous as vol @@ -109,13 +109,6 @@ class LogbookView(HomeAssistantView): def json_events() -> web.Response: """Fetch events and generate JSON.""" - return self.json( - event_processor.get_events( - start_day, - end_day, - ) - ) + return self.json(event_processor.get_events(start_day, end_day)) - return cast( - web.Response, await get_instance(hass).async_add_executor_job(json_events) - ) + return await get_instance(hass).async_add_executor_job(json_events) diff --git a/homeassistant/components/logbook/websocket_api.py b/homeassistant/components/logbook/websocket_api.py index cac58971cde..b776ad6303d 100644 --- a/homeassistant/components/logbook/websocket_api.py +++ b/homeassistant/components/logbook/websocket_api.py @@ -81,7 +81,6 @@ async def _async_send_historical_events( msg_id: int, start_time: dt, end_time: dt, - formatter: Callable[[int, Any], dict[str, Any]], event_processor: EventProcessor, partial: bool, force_send: bool = False, @@ -109,7 +108,6 @@ async def _async_send_historical_events( msg_id, start_time, end_time, - formatter, event_processor, partial, ) @@ -131,7 +129,6 @@ async def _async_send_historical_events( msg_id, recent_query_start, end_time, - formatter, event_processor, partial=True, ) @@ -143,7 +140,6 @@ async def _async_send_historical_events( msg_id, start_time, recent_query_start, - formatter, event_processor, partial, ) @@ -164,7 +160,6 @@ async def _async_get_ws_stream_events( msg_id: int, start_time: dt, end_time: dt, - formatter: Callable[[int, Any], dict[str, Any]], event_processor: EventProcessor, partial: bool, ) -> tuple[bytes, dt | None]: @@ -174,7 +169,6 @@ async def 
_async_get_ws_stream_events( msg_id, start_time, end_time, - formatter, event_processor, partial, ) @@ -195,7 +189,6 @@ def _ws_stream_get_events( msg_id: int, start_day: dt, end_day: dt, - formatter: Callable[[int, Any], dict[str, Any]], event_processor: EventProcessor, partial: bool, ) -> tuple[bytes, dt | None]: @@ -211,7 +204,7 @@ def _ws_stream_get_events( # data in case the UI needs to show that historical # data is still loading in the future message["partial"] = True - return json_bytes(formatter(msg_id, message)), last_time + return json_bytes(messages.event_message(msg_id, message)), last_time async def _async_events_consumer( @@ -318,7 +311,6 @@ async def ws_event_stream( msg_id, start_time, end_time, - messages.event_message, event_processor, partial=False, ) @@ -385,7 +377,6 @@ async def ws_event_stream( msg_id, start_time, subscriptions_setup_complete_time, - messages.event_message, event_processor, partial=True, # Force a send since the wait for the sync task @@ -431,7 +422,6 @@ async def ws_event_stream( # we could fetch the same event twice (last_event_time or start_time) + timedelta(microseconds=1), subscriptions_setup_complete_time, - messages.event_message, event_processor, partial=False, ) diff --git a/homeassistant/components/logger/icons.json b/homeassistant/components/logger/icons.json index 305dd3ece91..1542e1e5ad3 100644 --- a/homeassistant/components/logger/icons.json +++ b/homeassistant/components/logger/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_default_level": "mdi:cog-outline", - "set_level": "mdi:cog-outline" + "set_default_level": { + "service": "mdi:cog-outline" + }, + "set_level": { + "service": "mdi:cog-outline" + } } } diff --git a/homeassistant/components/logi_circle/__init__.py b/homeassistant/components/logi_circle/__init__.py deleted file mode 100644 index 0713bcc438e..00000000000 --- a/homeassistant/components/logi_circle/__init__.py +++ /dev/null @@ -1,271 +0,0 @@ -"""Support for Logi Circle devices.""" - -import asyncio - -from aiohttp.client_exceptions import ClientResponseError -from logi_circle import LogiCircle -from logi_circle.exception import AuthorizationFailed -import voluptuous as vol - -from homeassistant import config_entries -from homeassistant.components import persistent_notification -from homeassistant.components.camera import ATTR_FILENAME -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_ENTITY_ID, - ATTR_MODE, - CONF_API_KEY, - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_MONITORED_CONDITIONS, - CONF_SENSORS, - EVENT_HOMEASSISTANT_STOP, - Platform, -) -from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv, issue_registry as ir -from homeassistant.helpers.dispatcher import async_dispatcher_send -from homeassistant.helpers.typing import ConfigType - -from . 
import config_flow -from .const import ( - CONF_REDIRECT_URI, - DATA_LOGI, - DEFAULT_CACHEDB, - DOMAIN, - LED_MODE_KEY, - RECORDING_MODE_KEY, - SIGNAL_LOGI_CIRCLE_RECONFIGURE, - SIGNAL_LOGI_CIRCLE_RECORD, - SIGNAL_LOGI_CIRCLE_SNAPSHOT, -) -from .sensor import SENSOR_TYPES - -NOTIFICATION_ID = "logi_circle_notification" -NOTIFICATION_TITLE = "Logi Circle Setup" - -_TIMEOUT = 15 # seconds - -SERVICE_SET_CONFIG = "set_config" -SERVICE_LIVESTREAM_SNAPSHOT = "livestream_snapshot" -SERVICE_LIVESTREAM_RECORD = "livestream_record" - -ATTR_VALUE = "value" -ATTR_DURATION = "duration" - -PLATFORMS = [Platform.CAMERA, Platform.SENSOR] - -SENSOR_KEYS = [desc.key for desc in SENSOR_TYPES] - -SENSOR_SCHEMA = vol.Schema( - { - vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_KEYS): vol.All( - cv.ensure_list, [vol.In(SENSOR_KEYS)] - ) - } -) - -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_CLIENT_ID): cv.string, - vol.Required(CONF_CLIENT_SECRET): cv.string, - vol.Required(CONF_API_KEY): cv.string, - vol.Required(CONF_REDIRECT_URI): cv.string, - vol.Optional(CONF_SENSORS, default={}): SENSOR_SCHEMA, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) - -LOGI_CIRCLE_SERVICE_SET_CONFIG = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_MODE): vol.In([LED_MODE_KEY, RECORDING_MODE_KEY]), - vol.Required(ATTR_VALUE): cv.boolean, - } -) - -LOGI_CIRCLE_SERVICE_SNAPSHOT = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_FILENAME): cv.template, - } -) - -LOGI_CIRCLE_SERVICE_RECORD = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_FILENAME): cv.template, - vol.Required(ATTR_DURATION): cv.positive_int, - } -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up configured Logi Circle component.""" - if DOMAIN not in config: - return True - - conf = config[DOMAIN] - - config_flow.register_flow_implementation( - hass, - DOMAIN, - client_id=conf[CONF_CLIENT_ID], - client_secret=conf[CONF_CLIENT_SECRET], - api_key=conf[CONF_API_KEY], - redirect_uri=conf[CONF_REDIRECT_URI], - sensors=conf[CONF_SENSORS], - ) - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_IMPORT} - ) - ) - - return True - - -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Set up Logi Circle from a config entry.""" - ir.async_create_issue( - hass, - DOMAIN, - DOMAIN, - breaks_in_ha_version="2024.9.0", - is_fixable=False, - severity=ir.IssueSeverity.WARNING, - translation_key="integration_removed", - translation_placeholders={ - "entries": "/config/integrations/integration/logi_circle", - }, - ) - - logi_circle = LogiCircle( - client_id=entry.data[CONF_CLIENT_ID], - client_secret=entry.data[CONF_CLIENT_SECRET], - api_key=entry.data[CONF_API_KEY], - redirect_uri=entry.data[CONF_REDIRECT_URI], - cache_file=hass.config.path(DEFAULT_CACHEDB), - ) - - if not logi_circle.authorized: - persistent_notification.create( - hass, - ( - "Error: The cached access tokens are missing from" - f" {DEFAULT_CACHEDB}.
Please unload then re-add the Logi Circle"
-                " integration to resolve."
-            ),
-            title=NOTIFICATION_TITLE,
-            notification_id=NOTIFICATION_ID,
-        )
-        return False
-
-    try:
-        async with asyncio.timeout(_TIMEOUT):
-            # Ensure the cameras property returns the same Camera objects for
-            # all devices. Performs implicit login and session validation.
-            await logi_circle.synchronize_cameras()
-    except AuthorizationFailed:
-        persistent_notification.create(
-            hass,
-            (
-                "Error: Failed to obtain an access token from the cached "
-                "refresh token.<br />"
-                "Token may have expired or been revoked.<br />"
-                "Please unload then re-add the Logi Circle integration to resolve"
-            ),
-            title=NOTIFICATION_TITLE,
-            notification_id=NOTIFICATION_ID,
-        )
-        return False
-    except TimeoutError:
-        # The TimeoutError exception object returns nothing when casted to a
-        # string, so we'll handle it separately.
-        err = f"{_TIMEOUT}s timeout exceeded when connecting to Logi Circle API"
-        persistent_notification.create(
-            hass,
-            f"Error: {err}<br />You will need to restart hass after fixing.",
-            title=NOTIFICATION_TITLE,
-            notification_id=NOTIFICATION_ID,
-        )
-        return False
-    except ClientResponseError as ex:
-        persistent_notification.create(
-            hass,
-            f"Error: {ex}<br />
You will need to restart hass after fixing.", - title=NOTIFICATION_TITLE, - notification_id=NOTIFICATION_ID, - ) - return False - - hass.data[DATA_LOGI] = logi_circle - - await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) - - async def service_handler(service: ServiceCall) -> None: - """Dispatch service calls to target entities.""" - params = dict(service.data) - - if service.service == SERVICE_SET_CONFIG: - async_dispatcher_send(hass, SIGNAL_LOGI_CIRCLE_RECONFIGURE, params) - if service.service == SERVICE_LIVESTREAM_SNAPSHOT: - async_dispatcher_send(hass, SIGNAL_LOGI_CIRCLE_SNAPSHOT, params) - if service.service == SERVICE_LIVESTREAM_RECORD: - async_dispatcher_send(hass, SIGNAL_LOGI_CIRCLE_RECORD, params) - - hass.services.async_register( - DOMAIN, - SERVICE_SET_CONFIG, - service_handler, - schema=LOGI_CIRCLE_SERVICE_SET_CONFIG, - ) - - hass.services.async_register( - DOMAIN, - SERVICE_LIVESTREAM_SNAPSHOT, - service_handler, - schema=LOGI_CIRCLE_SERVICE_SNAPSHOT, - ) - - hass.services.async_register( - DOMAIN, - SERVICE_LIVESTREAM_RECORD, - service_handler, - schema=LOGI_CIRCLE_SERVICE_RECORD, - ) - - async def shut_down(event=None): - """Close Logi Circle aiohttp session.""" - await logi_circle.auth_provider.close() - - entry.async_on_unload( - hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, shut_down) - ) - - return True - - -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: - """Unload a config entry.""" - if all( - config_entry.state is config_entries.ConfigEntryState.NOT_LOADED - for config_entry in hass.config_entries.async_entries(DOMAIN) - if config_entry.entry_id != entry.entry_id - ): - ir.async_delete_issue(hass, DOMAIN, DOMAIN) - - unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS) - - logi_circle = hass.data.pop(DATA_LOGI) - - # Tell API wrapper to close all aiohttp sessions, invalidate WS connections - # and clear all locally cached tokens - await logi_circle.auth_provider.clear_authorization() - - return unload_ok diff --git a/homeassistant/components/logi_circle/camera.py b/homeassistant/components/logi_circle/camera.py deleted file mode 100644 index ad31713d734..00000000000 --- a/homeassistant/components/logi_circle/camera.py +++ /dev/null @@ -1,202 +0,0 @@ -"""Support to the Logi Circle cameras.""" - -from __future__ import annotations - -from datetime import timedelta -import logging - -from homeassistant.components.camera import Camera, CameraEntityFeature -from homeassistant.components.ffmpeg import get_ffmpeg_manager -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_BATTERY_CHARGING, - ATTR_BATTERY_LEVEL, - ATTR_ENTITY_ID, - STATE_OFF, - STATE_ON, -) -from homeassistant.core import HomeAssistant -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType - -from .const import ( - ATTRIBUTION, - DEVICE_BRAND, - DOMAIN as LOGI_CIRCLE_DOMAIN, - LED_MODE_KEY, - RECORDING_MODE_KEY, - SIGNAL_LOGI_CIRCLE_RECONFIGURE, - SIGNAL_LOGI_CIRCLE_RECORD, - SIGNAL_LOGI_CIRCLE_SNAPSHOT, -) - -_LOGGER = logging.getLogger(__name__) - -SCAN_INTERVAL = timedelta(seconds=60) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) 
-> None: - """Set up a Logi Circle Camera. Obsolete.""" - _LOGGER.warning("Logi Circle no longer works with camera platform configuration") - - -async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Set up a Logi Circle Camera based on a config entry.""" - devices = await hass.data[LOGI_CIRCLE_DOMAIN].cameras - ffmpeg = get_ffmpeg_manager(hass) - - cameras = [LogiCam(device, ffmpeg) for device in devices] - - async_add_entities(cameras, True) - - -class LogiCam(Camera): - """An implementation of a Logi Circle camera.""" - - _attr_attribution = ATTRIBUTION - _attr_should_poll = True # Cameras default to False - _attr_supported_features = CameraEntityFeature.ON_OFF - _attr_has_entity_name = True - _attr_name = None - - def __init__(self, camera, ffmpeg): - """Initialize Logi Circle camera.""" - super().__init__() - self._camera = camera - self._has_battery = camera.supports_feature("battery_level") - self._ffmpeg = ffmpeg - self._listeners = [] - self._attr_unique_id = camera.mac_address - self._attr_device_info = DeviceInfo( - identifiers={(LOGI_CIRCLE_DOMAIN, camera.id)}, - manufacturer=DEVICE_BRAND, - model=camera.model_name, - name=camera.name, - sw_version=camera.firmware, - ) - - async def async_added_to_hass(self) -> None: - """Connect camera methods to signals.""" - - def _dispatch_proxy(method): - """Expand parameters & filter entity IDs.""" - - async def _call(params): - entity_ids = params.get(ATTR_ENTITY_ID) - filtered_params = { - k: v for k, v in params.items() if k != ATTR_ENTITY_ID - } - if entity_ids is None or self.entity_id in entity_ids: - await method(**filtered_params) - - return _call - - self._listeners.extend( - [ - async_dispatcher_connect( - self.hass, - SIGNAL_LOGI_CIRCLE_RECONFIGURE, - _dispatch_proxy(self.set_config), - ), - async_dispatcher_connect( - self.hass, - SIGNAL_LOGI_CIRCLE_SNAPSHOT, - _dispatch_proxy(self.livestream_snapshot), - ), - async_dispatcher_connect( - self.hass, - SIGNAL_LOGI_CIRCLE_RECORD, - _dispatch_proxy(self.download_livestream), - ), - ] - ) - - async def async_will_remove_from_hass(self) -> None: - """Disconnect dispatcher listeners when removed.""" - for detach in self._listeners: - detach() - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - state = { - "battery_saving_mode": ( - STATE_ON if self._camera.battery_saving else STATE_OFF - ), - "microphone_gain": self._camera.microphone_gain, - } - - # Add battery attributes if camera is battery-powered - if self._has_battery: - state[ATTR_BATTERY_CHARGING] = self._camera.charging - state[ATTR_BATTERY_LEVEL] = self._camera.battery_level - - return state - - async def async_camera_image( - self, width: int | None = None, height: int | None = None - ) -> bytes | None: - """Return a still image from the camera.""" - return await self._camera.live_stream.download_jpeg() - - async def async_turn_off(self) -> None: - """Disable streaming mode for this camera.""" - await self._camera.set_config("streaming", False) - - async def async_turn_on(self) -> None: - """Enable streaming mode for this camera.""" - await self._camera.set_config("streaming", True) - - async def set_config(self, mode, value): - """Set an configuration property for the target camera.""" - if mode == LED_MODE_KEY: - await self._camera.set_config("led", value) - if mode == RECORDING_MODE_KEY: - await self._camera.set_config("recording_disabled", not value) - - async def download_livestream(self, filename, duration): 
- """Download a recording from the camera's livestream.""" - # Render filename from template. - filename.hass = self.hass - stream_file = filename.async_render(variables={ATTR_ENTITY_ID: self.entity_id}) - - # Respect configured allowed paths. - if not self.hass.config.is_allowed_path(stream_file): - _LOGGER.error("Can't write %s, no access to path!", stream_file) - return - - await self._camera.live_stream.download_rtsp( - filename=stream_file, - duration=timedelta(seconds=duration), - ffmpeg_bin=self._ffmpeg.binary, - ) - - async def livestream_snapshot(self, filename): - """Download a still frame from the camera's livestream.""" - # Render filename from template. - filename.hass = self.hass - snapshot_file = filename.async_render( - variables={ATTR_ENTITY_ID: self.entity_id} - ) - - # Respect configured allowed paths. - if not self.hass.config.is_allowed_path(snapshot_file): - _LOGGER.error("Can't write %s, no access to path!", snapshot_file) - return - - await self._camera.live_stream.download_jpeg( - filename=snapshot_file, refresh=True - ) - - async def async_update(self) -> None: - """Update camera entity and refresh attributes.""" - await self._camera.update() diff --git a/homeassistant/components/logi_circle/config_flow.py b/homeassistant/components/logi_circle/config_flow.py deleted file mode 100644 index 6c1a549aa04..00000000000 --- a/homeassistant/components/logi_circle/config_flow.py +++ /dev/null @@ -1,206 +0,0 @@ -"""Config flow to configure Logi Circle component.""" - -import asyncio -from collections import OrderedDict -from http import HTTPStatus - -from logi_circle import LogiCircle -from logi_circle.exception import AuthorizationFailed -import voluptuous as vol - -from homeassistant.components.http import KEY_HASS, HomeAssistantView -from homeassistant.config_entries import ConfigFlow -from homeassistant.const import ( - CONF_API_KEY, - CONF_CLIENT_ID, - CONF_CLIENT_SECRET, - CONF_SENSORS, -) -from homeassistant.core import callback - -from .const import CONF_REDIRECT_URI, DEFAULT_CACHEDB, DOMAIN - -_TIMEOUT = 15 # seconds - -DATA_FLOW_IMPL = "logi_circle_flow_implementation" -EXTERNAL_ERRORS = "logi_errors" -AUTH_CALLBACK_PATH = "/api/logi_circle" -AUTH_CALLBACK_NAME = "api:logi_circle" - - -@callback -def register_flow_implementation( - hass, domain, client_id, client_secret, api_key, redirect_uri, sensors -): - """Register a flow implementation. - - domain: Domain of the component responsible for the implementation. - client_id: Client ID. - client_secret: Client secret. - api_key: API key issued by Logitech. - redirect_uri: Auth callback redirect URI. - sensors: Sensor config. 
- """ - if DATA_FLOW_IMPL not in hass.data: - hass.data[DATA_FLOW_IMPL] = OrderedDict() - - hass.data[DATA_FLOW_IMPL][domain] = { - CONF_CLIENT_ID: client_id, - CONF_CLIENT_SECRET: client_secret, - CONF_API_KEY: api_key, - CONF_REDIRECT_URI: redirect_uri, - CONF_SENSORS: sensors, - EXTERNAL_ERRORS: None, - } - - -class LogiCircleFlowHandler(ConfigFlow, domain=DOMAIN): - """Config flow for Logi Circle component.""" - - VERSION = 1 - - def __init__(self) -> None: - """Initialize flow.""" - self.flow_impl = None - - async def async_step_import(self, user_input=None): - """Handle external yaml configuration.""" - self._async_abort_entries_match() - - self.flow_impl = DOMAIN - - return await self.async_step_auth() - - async def async_step_user(self, user_input=None): - """Handle a flow start.""" - flows = self.hass.data.get(DATA_FLOW_IMPL, {}) - - self._async_abort_entries_match() - - if not flows: - return self.async_abort(reason="missing_configuration") - - if len(flows) == 1: - self.flow_impl = list(flows)[0] - return await self.async_step_auth() - - if user_input is not None: - self.flow_impl = user_input["flow_impl"] - return await self.async_step_auth() - - return self.async_show_form( - step_id="user", - data_schema=vol.Schema({vol.Required("flow_impl"): vol.In(list(flows))}), - ) - - async def async_step_auth(self, user_input=None): - """Create an entry for auth.""" - if self._async_current_entries(): - return self.async_abort(reason="external_setup") - - external_error = self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS] - errors = {} - if external_error: - # Handle error from another flow - errors["base"] = external_error - self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS] = None - elif user_input is not None: - errors["base"] = "follow_link" - - url = self._get_authorization_url() - - return self.async_show_form( - step_id="auth", - description_placeholders={"authorization_url": url}, - errors=errors, - ) - - def _get_authorization_url(self): - """Create temporary Circle session and generate authorization url.""" - flow = self.hass.data[DATA_FLOW_IMPL][self.flow_impl] - client_id = flow[CONF_CLIENT_ID] - client_secret = flow[CONF_CLIENT_SECRET] - api_key = flow[CONF_API_KEY] - redirect_uri = flow[CONF_REDIRECT_URI] - - logi_session = LogiCircle( - client_id=client_id, - client_secret=client_secret, - api_key=api_key, - redirect_uri=redirect_uri, - ) - - self.hass.http.register_view(LogiCircleAuthCallbackView()) - - return logi_session.authorize_url - - async def async_step_code(self, code=None): - """Received code for authentication.""" - self._async_abort_entries_match() - - return await self._async_create_session(code) - - async def _async_create_session(self, code): - """Create Logi Circle session and entries.""" - flow = self.hass.data[DATA_FLOW_IMPL][DOMAIN] - client_id = flow[CONF_CLIENT_ID] - client_secret = flow[CONF_CLIENT_SECRET] - api_key = flow[CONF_API_KEY] - redirect_uri = flow[CONF_REDIRECT_URI] - sensors = flow[CONF_SENSORS] - - logi_session = LogiCircle( - client_id=client_id, - client_secret=client_secret, - api_key=api_key, - redirect_uri=redirect_uri, - cache_file=self.hass.config.path(DEFAULT_CACHEDB), - ) - - try: - async with asyncio.timeout(_TIMEOUT): - await logi_session.authorize(code) - except AuthorizationFailed: - (self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS]) = "invalid_auth" - return self.async_abort(reason="external_error") - except TimeoutError: - ( - self.hass.data[DATA_FLOW_IMPL][DOMAIN][EXTERNAL_ERRORS] - ) = 
"authorize_url_timeout" - return self.async_abort(reason="external_error") - - account_id = (await logi_session.account)["accountId"] - await logi_session.close() - return self.async_create_entry( - title=f"Logi Circle ({account_id})", - data={ - CONF_CLIENT_ID: client_id, - CONF_CLIENT_SECRET: client_secret, - CONF_API_KEY: api_key, - CONF_REDIRECT_URI: redirect_uri, - CONF_SENSORS: sensors, - }, - ) - - -class LogiCircleAuthCallbackView(HomeAssistantView): - """Logi Circle Authorization Callback View.""" - - requires_auth = False - url = AUTH_CALLBACK_PATH - name = AUTH_CALLBACK_NAME - - async def get(self, request): - """Receive authorization code.""" - hass = request.app[KEY_HASS] - if "code" in request.query: - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, context={"source": "code"}, data=request.query["code"] - ) - ) - return self.json_message("Authorisation code saved") - return self.json_message( - "Authorisation code missing from query string", - status_code=HTTPStatus.BAD_REQUEST, - ) diff --git a/homeassistant/components/logi_circle/const.py b/homeassistant/components/logi_circle/const.py deleted file mode 100644 index e144f47ce4e..00000000000 --- a/homeassistant/components/logi_circle/const.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Constants in Logi Circle component.""" - -from __future__ import annotations - -DOMAIN = "logi_circle" -DATA_LOGI = DOMAIN - -CONF_REDIRECT_URI = "redirect_uri" - -DEFAULT_CACHEDB = ".logi_cache.pickle" - - -LED_MODE_KEY = "LED" -RECORDING_MODE_KEY = "RECORDING_MODE" - -SIGNAL_LOGI_CIRCLE_RECONFIGURE = "logi_circle_reconfigure" -SIGNAL_LOGI_CIRCLE_SNAPSHOT = "logi_circle_snapshot" -SIGNAL_LOGI_CIRCLE_RECORD = "logi_circle_record" - -# Attribution -ATTRIBUTION = "Data provided by circle.logi.com" -DEVICE_BRAND = "Logitech" diff --git a/homeassistant/components/logi_circle/icons.json b/homeassistant/components/logi_circle/icons.json deleted file mode 100644 index 9289746d375..00000000000 --- a/homeassistant/components/logi_circle/icons.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "services": { - "set_config": "mdi:cog", - "livestream_snapshot": "mdi:camera", - "livestream_record": "mdi:record-rec" - } -} diff --git a/homeassistant/components/logi_circle/manifest.json b/homeassistant/components/logi_circle/manifest.json deleted file mode 100644 index f4f65b22505..00000000000 --- a/homeassistant/components/logi_circle/manifest.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "domain": "logi_circle", - "name": "Logi Circle", - "codeowners": ["@evanjd"], - "config_flow": true, - "dependencies": ["ffmpeg", "http"], - "documentation": "https://www.home-assistant.io/integrations/logi_circle", - "iot_class": "cloud_polling", - "loggers": ["logi_circle"], - "requirements": ["logi-circle==0.2.3"] -} diff --git a/homeassistant/components/logi_circle/sensor.py b/homeassistant/components/logi_circle/sensor.py deleted file mode 100644 index 121cb8848ae..00000000000 --- a/homeassistant/components/logi_circle/sensor.py +++ /dev/null @@ -1,164 +0,0 @@ -"""Support for Logi Circle sensors.""" - -from __future__ import annotations - -import logging -from typing import Any - -from homeassistant.components.sensor import ( - SensorDeviceClass, - SensorEntity, - SensorEntityDescription, -) -from homeassistant.config_entries import ConfigEntry -from homeassistant.const import ( - ATTR_BATTERY_CHARGING, - CONF_MONITORED_CONDITIONS, - CONF_SENSORS, - PERCENTAGE, - STATE_OFF, - STATE_ON, -) -from homeassistant.core import HomeAssistant -from 
homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.util.dt import as_local - -from .const import ATTRIBUTION, DEVICE_BRAND, DOMAIN as LOGI_CIRCLE_DOMAIN - -_LOGGER = logging.getLogger(__name__) - - -SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( - SensorEntityDescription( - key="battery_level", - native_unit_of_measurement=PERCENTAGE, - device_class=SensorDeviceClass.BATTERY, - ), - SensorEntityDescription( - key="last_activity_time", - translation_key="last_activity", - icon="mdi:history", - ), - SensorEntityDescription( - key="recording", - translation_key="recording_mode", - icon="mdi:eye", - ), - SensorEntityDescription( - key="signal_strength_category", - translation_key="wifi_signal_category", - icon="mdi:wifi", - ), - SensorEntityDescription( - key="signal_strength_percentage", - translation_key="wifi_signal_strength", - native_unit_of_measurement=PERCENTAGE, - icon="mdi:wifi", - ), - SensorEntityDescription( - key="streaming", - translation_key="streaming_mode", - icon="mdi:camera", - ), -) - - -async def async_setup_platform( - hass: HomeAssistant, - config: ConfigType, - async_add_entities: AddEntitiesCallback, - discovery_info: DiscoveryInfoType | None = None, -) -> None: - """Set up a sensor for a Logi Circle device. Obsolete.""" - _LOGGER.warning("Logi Circle no longer works with sensor platform configuration") - - -async def async_setup_entry( - hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback -) -> None: - """Set up a Logi Circle sensor based on a config entry.""" - devices = await hass.data[LOGI_CIRCLE_DOMAIN].cameras - time_zone = str(hass.config.time_zone) - - monitored_conditions = entry.data[CONF_SENSORS].get(CONF_MONITORED_CONDITIONS) - entities = [ - LogiSensor(device, time_zone, description) - for description in SENSOR_TYPES - if description.key in monitored_conditions - for device in devices - if device.supports_feature(description.key) - ] - - async_add_entities(entities, True) - - -class LogiSensor(SensorEntity): - """A sensor implementation for a Logi Circle camera.""" - - _attr_attribution = ATTRIBUTION - _attr_has_entity_name = True - - def __init__(self, camera, time_zone, description: SensorEntityDescription) -> None: - """Initialize a sensor for Logi Circle camera.""" - self.entity_description = description - self._camera = camera - self._attr_unique_id = f"{camera.mac_address}-{description.key}" - self._activity: dict[Any, Any] = {} - self._tz = time_zone - self._attr_device_info = DeviceInfo( - identifiers={(LOGI_CIRCLE_DOMAIN, camera.id)}, - manufacturer=DEVICE_BRAND, - model=camera.model_name, - name=camera.name, - sw_version=camera.firmware, - ) - - @property - def extra_state_attributes(self): - """Return the state attributes.""" - state = { - "battery_saving_mode": ( - STATE_ON if self._camera.battery_saving else STATE_OFF - ), - "microphone_gain": self._camera.microphone_gain, - } - - if self.entity_description.key == "battery_level": - state[ATTR_BATTERY_CHARGING] = self._camera.charging - - return state - - @property - def icon(self): - """Icon to use in the frontend, if any.""" - sensor_type = self.entity_description.key - if sensor_type == "recording_mode" and self._attr_native_value is not None: - return "mdi:eye" if self._attr_native_value == STATE_ON else "mdi:eye-off" - if sensor_type == "streaming_mode" and self._attr_native_value is not 
None: - return ( - "mdi:camera" - if self._attr_native_value == STATE_ON - else "mdi:camera-off" - ) - return self.entity_description.icon - - async def async_update(self) -> None: - """Get the latest data and updates the state.""" - _LOGGER.debug("Pulling data from %s sensor", self.name) - await self._camera.update() - - if self.entity_description.key == "last_activity_time": - last_activity = await self._camera.get_last_activity(force_refresh=True) - if last_activity is not None: - last_activity_time = as_local(last_activity.end_time_utc) - self._attr_native_value = ( - f"{last_activity_time.hour:0>2}:{last_activity_time.minute:0>2}" - ) - else: - state = getattr(self._camera, self.entity_description.key, None) - if isinstance(state, bool): - self._attr_native_value = STATE_ON if state is True else STATE_OFF - else: - self._attr_native_value = state diff --git a/homeassistant/components/logi_circle/services.yaml b/homeassistant/components/logi_circle/services.yaml deleted file mode 100644 index cb855a953a6..00000000000 --- a/homeassistant/components/logi_circle/services.yaml +++ /dev/null @@ -1,53 +0,0 @@ -# Describes the format for available Logi Circle services - -set_config: - fields: - entity_id: - selector: - entity: - integration: logi_circle - domain: camera - mode: - required: true - selector: - select: - options: - - "LED" - - "RECORDING_MODE" - value: - required: true - selector: - boolean: - -livestream_snapshot: - fields: - entity_id: - selector: - entity: - integration: logi_circle - domain: camera - filename: - required: true - example: "/tmp/snapshot_{{ entity_id }}.jpg" - selector: - text: - -livestream_record: - fields: - entity_id: - selector: - entity: - integration: logi_circle - domain: camera - filename: - required: true - example: "/tmp/snapshot_{{ entity_id }}.mp4" - selector: - text: - duration: - required: true - selector: - number: - min: 1 - max: 3600 - unit_of_measurement: seconds diff --git a/homeassistant/components/logi_circle/strings.json b/homeassistant/components/logi_circle/strings.json deleted file mode 100644 index be0f4632c25..00000000000 --- a/homeassistant/components/logi_circle/strings.json +++ /dev/null @@ -1,105 +0,0 @@ -{ - "config": { - "step": { - "user": { - "title": "Authentication Provider", - "description": "Pick via which authentication provider you want to authenticate with Logi Circle.", - "data": { - "flow_impl": "Provider" - } - }, - "auth": { - "title": "Authenticate with Logi Circle", - "description": "Please follow the link below and **Accept** access to your Logi Circle account, then come back and press **Submit** below.\n\n[Link]({authorization_url})" - } - }, - "error": { - "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", - "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", - "follow_link": "Please follow the link and authenticate before pressing Submit." 
- }, - "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", - "external_error": "Exception occurred from another flow.", - "external_setup": "Logi Circle successfully configured from another flow.", - "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]" - } - }, - "entity": { - "sensor": { - "last_activity": { - "name": "Last activity" - }, - "recording_mode": { - "name": "Recording mode" - }, - "wifi_signal_category": { - "name": "Wi-Fi signal category" - }, - "wifi_signal_strength": { - "name": "Wi-Fi signal strength" - }, - "streaming_mode": { - "name": "Streaming mode" - } - } - }, - "issues": { - "integration_removed": { - "title": "The Logi Circle integration has been deprecated and will be removed", - "description": "Logitech stopped accepting applications for access to the Logi Circle API in May 2022, and the Logi Circle integration will be removed from Home Assistant.\n\nTo resolve this issue, please remove the integration entries from your Home Assistant setup. [Click here to see your existing Logi Circle integration entries]({entries})." - } - }, - "services": { - "set_config": { - "name": "Set config", - "description": "Sets a configuration property.", - "fields": { - "entity_id": { - "name": "Entity", - "description": "Name(s) of entities to apply the operation mode to." - }, - "mode": { - "name": "[%key:common::config_flow::data::mode%]", - "description": "Operation mode. Allowed values: LED, RECORDING_MODE." - }, - "value": { - "name": "Value", - "description": "Operation value." - } - } - }, - "livestream_snapshot": { - "name": "Livestream snapshot", - "description": "Takes a snapshot from the camera's livestream. Will wake the camera from sleep if required.", - "fields": { - "entity_id": { - "name": "Entity", - "description": "Name(s) of entities to create snapshots from." - }, - "filename": { - "name": "File name", - "description": "Template of a Filename. Variable is entity_id." - } - } - }, - "livestream_record": { - "name": "Livestream record", - "description": "Takes a video recording from the camera's livestream.", - "fields": { - "entity_id": { - "name": "Entity", - "description": "Name(s) of entities to create recordings from." - }, - "filename": { - "name": "File name", - "description": "[%key:component::logi_circle::services::livestream_snapshot::fields::filename::description%]" - }, - "duration": { - "name": "Duration", - "description": "Recording duration." 
- } - } - } - } -} diff --git a/homeassistant/components/lovelace/icons.json b/homeassistant/components/lovelace/icons.json index fe0a0e114ae..8261dc2d0c9 100644 --- a/homeassistant/components/lovelace/icons.json +++ b/homeassistant/components/lovelace/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload_resources": "mdi:reload" + "reload_resources": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/luftdaten/__init__.py b/homeassistant/components/luftdaten/__init__.py index 2ef7864566f..9079b056731 100644 --- a/homeassistant/components/luftdaten/__init__.py +++ b/homeassistant/components/luftdaten/__init__.py @@ -15,7 +15,6 @@ from luftdaten.exceptions import LuftdatenError from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import CONF_SENSOR_ID, DEFAULT_SCAN_INTERVAL, DOMAIN @@ -24,8 +23,6 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Sensor.Community as config entry.""" diff --git a/homeassistant/components/lupusec/__init__.py b/homeassistant/components/lupusec/__init__.py index 51bba44aef0..c0593674972 100644 --- a/homeassistant/components/lupusec/__init__.py +++ b/homeassistant/components/lupusec/__init__.py @@ -5,24 +5,10 @@ import logging import lupupy from lupupy.exceptions import LupusecException -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_HOST, - CONF_IP_ADDRESS, - CONF_NAME, - CONF_PASSWORD, - CONF_USERNAME, - Platform, -) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType - -from .const import INTEGRATION_TITLE, ISSUE_PLACEHOLDER +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant _LOGGER = logging.getLogger(__name__) @@ -31,19 +17,6 @@ DOMAIN = "lupusec" NOTIFICATION_ID = "lupusec_notification" NOTIFICATION_TITLE = "Lupusec Security Setup" -CONFIG_SCHEMA = vol.Schema( - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_USERNAME): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - vol.Required(CONF_IP_ADDRESS): cv.string, - vol.Optional(CONF_NAME): cv.string, - } - ) - }, - extra=vol.ALLOW_EXTRA, -) PLATFORMS: list[Platform] = [ Platform.ALARM_CONTROL_PANEL, @@ -52,56 +25,6 @@ PLATFORMS: list[Platform] = [ ] -async def handle_async_init_result(hass: HomeAssistant, domain: str, conf: dict): - """Handle the result of the async_init to issue deprecated warnings.""" - flow = hass.config_entries.flow - result = await flow.async_init(domain, context={"source": SOURCE_IMPORT}, data=conf) - - if ( - result["type"] == FlowResultType.CREATE_ENTRY - or result["reason"] == "already_configured" - ): - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, 
- severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": INTEGRATION_TITLE, - }, - ) - else: - async_create_issue( - hass, - DOMAIN, - f"deprecated_yaml_import_issue_{result['reason']}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{result['reason']}", - translation_placeholders=ISSUE_PLACEHOLDER, - ) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the lupusec integration.""" - - if DOMAIN not in config: - return True - - conf = config[DOMAIN] - - hass.async_create_task(handle_async_init_result(hass, DOMAIN, conf)) - - return True - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up this integration using UI.""" diff --git a/homeassistant/components/lupusec/config_flow.py b/homeassistant/components/lupusec/config_flow.py index 82162bccf80..45b2b2b0cd8 100644 --- a/homeassistant/components/lupusec/config_flow.py +++ b/homeassistant/components/lupusec/config_flow.py @@ -8,13 +8,7 @@ import lupupy import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult -from homeassistant.const import ( - CONF_HOST, - CONF_IP_ADDRESS, - CONF_NAME, - CONF_PASSWORD, - CONF_USERNAME, -) +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -66,37 +60,6 @@ class LupusecConfigFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Import the yaml config.""" - self._async_abort_entries_match( - { - CONF_HOST: user_input[CONF_IP_ADDRESS], - CONF_USERNAME: user_input[CONF_USERNAME], - CONF_PASSWORD: user_input[CONF_PASSWORD], - } - ) - host = user_input[CONF_IP_ADDRESS] - username = user_input[CONF_USERNAME] - password = user_input[CONF_PASSWORD] - try: - await test_host_connection(self.hass, host, username, password) - except CannotConnect: - return self.async_abort(reason="cannot_connect") - except JSONDecodeError: - return self.async_abort(reason="cannot_connect") - except Exception: - _LOGGER.exception("Unexpected exception") - return self.async_abort(reason="unknown") - - return self.async_create_entry( - title=user_input.get(CONF_NAME, host), - data={ - CONF_HOST: host, - CONF_USERNAME: username, - CONF_PASSWORD: password, - }, - ) - async def test_host_connection( hass: HomeAssistant, host: str, username: str, password: str diff --git a/homeassistant/components/lupusec/const.py b/homeassistant/components/lupusec/const.py index 489d878306d..4904bc481a7 100644 --- a/homeassistant/components/lupusec/const.py +++ b/homeassistant/components/lupusec/const.py @@ -18,10 +18,6 @@ from lupupy.constants import ( DOMAIN = "lupusec" -INTEGRATION_TITLE = "Lupus Electronics LUPUSEC" -ISSUE_PLACEHOLDER = {"url": "/config/integrations/dashboard/add?domain=lupusec"} - - TYPE_TRANSLATION = { TYPE_WINDOW: "Fensterkontakt", TYPE_DOOR: "Türkontakt", diff --git a/homeassistant/components/lupusec/strings.json b/homeassistant/components/lupusec/strings.json index 6fa59aaeb3d..907232e0665 100644 --- a/homeassistant/components/lupusec/strings.json +++ b/homeassistant/components/lupusec/strings.json @@ -17,15 +17,5 @@ "abort": { "already_configured": 
"[%key:common::config_flow::abort::already_configured_device%]" } - }, - "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Lupus Electronics LUPUSEC YAML configuration import failed", - "description": "Configuring Lupus Electronics LUPUSEC using YAML is being removed but there was a connection error importing your YAML configuration.\n\nEnsure connection to Lupus Electronics LUPUSEC works and restart Home Assistant to try again or remove the Lupus Electronics LUPUSEC YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - }, - "deprecated_yaml_import_issue_unknown": { - "title": "The Lupus Electronics LUPUSEC YAML configuration import failed", - "description": "Configuring Lupus Electronics LUPUSEC using YAML is being removed but there was an unknown error when trying to import the YAML configuration.\n\nEnsure the imported configuration is correct and remove the Lupus Electronics LUPUSEC YAML configuration from your configuration.yaml file and continue to [set up the integration]({url}) manually." - } } } diff --git a/homeassistant/components/lutron/__init__.py b/homeassistant/components/lutron/__init__.py index 1521a05df8e..45a51eb6df8 100644 --- a/homeassistant/components/lutron/__init__.py +++ b/homeassistant/components/lutron/__init__.py @@ -82,8 +82,6 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b elif output.type == "CEILING_FAN_TYPE": entry_data.fans.append((area.name, output)) platform = Platform.FAN - # Deprecated, should be removed in 2024.8 - entry_data.lights.append((area.name, output)) elif output.is_dimmable: entry_data.lights.append((area.name, output)) platform = Platform.LIGHT diff --git a/homeassistant/components/lutron/light.py b/homeassistant/components/lutron/light.py index eb003fd431a..7e8829b231c 100644 --- a/homeassistant/components/lutron/light.py +++ b/homeassistant/components/lutron/light.py @@ -3,12 +3,10 @@ from __future__ import annotations from collections.abc import Mapping -import logging from typing import Any from pylutron import Output -from homeassistant.components.automation import automations_with_entity from homeassistant.components.light import ( ATTR_BRIGHTNESS, ATTR_FLASH, @@ -17,23 +15,13 @@ from homeassistant.components.light import ( LightEntity, LightEntityFeature, ) -from homeassistant.components.script import scripts_with_entity from homeassistant.config_entries import ConfigEntry -from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import ( - IssueSeverity, - async_create_issue, - create_issue, -) from . import DOMAIN, LutronData from .entity import LutronDevice -_LOGGER = logging.getLogger(__name__) - async def async_setup_entry( hass: HomeAssistant, @@ -45,50 +33,13 @@ async def async_setup_entry( Adds dimmers from the Main Repeater associated with the config_entry as light entities. """ - ent_reg = er.async_get(hass) entry_data: LutronData = hass.data[DOMAIN][config_entry.entry_id] - lights = [] - - for area_name, device in entry_data.lights: - if device.type == "CEILING_FAN_TYPE": - # If this is a fan, check to see if this entity already exists. - # If not, do not create a new one. 
- entity_id = ent_reg.async_get_entity_id( - Platform.LIGHT, - DOMAIN, - f"{entry_data.client.guid}_{device.uuid}", - ) - if entity_id: - entity_entry = ent_reg.async_get(entity_id) - assert entity_entry - if entity_entry.disabled: - # If the entity exists and is disabled then we want to remove - # the entity so that the user is using the new fan entity instead. - ent_reg.async_remove(entity_id) - else: - lights.append(LutronLight(area_name, device, entry_data.client)) - entity_automations = automations_with_entity(hass, entity_id) - entity_scripts = scripts_with_entity(hass, entity_id) - for item in entity_automations + entity_scripts: - async_create_issue( - hass, - DOMAIN, - f"deprecated_light_fan_{entity_id}_{item}", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_light_fan_entity", - translation_placeholders={ - "entity": entity_id, - "info": item, - }, - ) - else: - lights.append(LutronLight(area_name, device, entry_data.client)) async_add_entities( - lights, + ( + LutronLight(area_name, device, entry_data.client) + for area_name, device in entry_data.lights + ), True, ) @@ -113,24 +64,8 @@ class LutronLight(LutronDevice, LightEntity): _prev_brightness: int | None = None _attr_name = None - def __init__(self, area_name, lutron_device, controller) -> None: - """Initialize the light.""" - super().__init__(area_name, lutron_device, controller) - self._is_fan = lutron_device.type == "CEILING_FAN_TYPE" - def turn_on(self, **kwargs: Any) -> None: """Turn the light on.""" - if self._is_fan: - create_issue( - self.hass, - DOMAIN, - "deprecated_light_fan_on", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_light_fan_on", - ) if flash := kwargs.get(ATTR_FLASH): self._lutron_device.flash(0.5 if flash == "short" else 1.5) else: @@ -148,17 +83,6 @@ class LutronLight(LutronDevice, LightEntity): def turn_off(self, **kwargs: Any) -> None: """Turn the light off.""" - if self._is_fan: - create_issue( - self.hass, - DOMAIN, - "deprecated_light_fan_off", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_light_fan_off", - ) args = {"new_level": 0} if ATTR_TRANSITION in kwargs: args["fade_time_seconds"] = kwargs[ATTR_TRANSITION] diff --git a/homeassistant/components/lutron/strings.json b/homeassistant/components/lutron/strings.json index d5197375dc1..770a453eb9e 100644 --- a/homeassistant/components/lutron/strings.json +++ b/homeassistant/components/lutron/strings.json @@ -36,19 +36,5 @@ } } } - }, - "issues": { - "deprecated_light_fan_entity": { - "title": "Detected Lutron fan entity created as a light", - "description": "Fan entities have been added to the Lutron integration.\nWe detected that entity `{entity}` is being used in `{info}`\n\nWe have created a new fan entity and you should migrate `{info}` to use this new entity.\n\nWhen you are done migrating `{info}` and are ready to have the deprecated light entity removed, disable the entity and restart Home Assistant." - }, - "deprecated_light_fan_on": { - "title": "The Lutron integration deprecated fan turned on", - "description": "Fan entities have been added to the Lutron integration.\nPreviously fans were created as lights; this behavior is now deprecated.\n\nYour configuration just turned on a fan created as a light. 
You should migrate your scenes and automations to use the new fan entity.\n\nWhen you are done migrating your automations and are ready to have the deprecated light entity removed, disable the entity and restart Home Assistant.\n\nAn issue will be created each time the incorrect entity is used to remind you to migrate." - }, - "deprecated_light_fan_off": { - "title": "The Lutron integration deprecated fan turned off", - "description": "Fan entities have been added to the Lutron integration.\nPreviously fans were created as lights; this behavior is now deprecated.\n\nYour configuration just turned off a fan created as a light. You should migrate your scenes and automations to use the new fan entity.\n\nWhen you are done migrating your automations and are ready to have the deprecated light entity removed, disable the entity and restart Home Assistant.\n\nAn issue will be created each time the incorrect entity is used to remind you to migrate." - } } } diff --git a/homeassistant/components/lutron_caseta/config_flow.py b/homeassistant/components/lutron_caseta/config_flow.py index d7b47aebc7e..703fbb813c6 100644 --- a/homeassistant/components/lutron_caseta/config_flow.py +++ b/homeassistant/components/lutron_caseta/config_flow.py @@ -6,6 +6,7 @@ import asyncio import logging import os import ssl +from typing import Any from pylutron_caseta.pairing import PAIR_CA, PAIR_CERT, PAIR_KEY, async_pair from pylutron_caseta.smartbridge import Smartbridge @@ -50,14 +51,16 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize a Lutron Caseta flow.""" - self.data = {} - self.lutron_id = None + self.data: dict[str, Any] = {} + self.lutron_id: str | None = None self.tls_assets_validated = False self.attempted_tls_validation = False - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if user_input is not None: self.data[CONF_HOST] = user_input[CONF_HOST] @@ -163,21 +166,21 @@ class LutronCasetaFlowHandler(ConfigFlow, domain=DOMAIN): for asset_key, conf_key in FILE_MAPPING.items(): self.data[conf_key] = TLS_ASSET_TEMPLATE.format(self.bridge_id, asset_key) - async def async_step_import(self, import_info): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a new Caseta bridge as a config entry. This flow is triggered by `async_setup`. """ - host = import_info[CONF_HOST] + host = import_data[CONF_HOST] # Store the imported config for other steps in this flow to access. self.data[CONF_HOST] = host # Abort if existing entry with matching host exists. 
self._async_abort_entries_match({CONF_HOST: self.data[CONF_HOST]}) - self.data[CONF_KEYFILE] = import_info[CONF_KEYFILE] - self.data[CONF_CERTFILE] = import_info[CONF_CERTFILE] - self.data[CONF_CA_CERTS] = import_info[CONF_CA_CERTS] + self.data[CONF_KEYFILE] = import_data[CONF_KEYFILE] + self.data[CONF_CERTFILE] = import_data[CONF_CERTFILE] + self.data[CONF_CA_CERTS] = import_data[CONF_CA_CERTS] if not (lutron_id := await self.async_get_lutron_id()): # Ultimately we won't have a dedicated step for import failure, but diff --git a/homeassistant/components/lyric/__init__.py b/homeassistant/components/lyric/__init__.py index e1eaed6602c..6c35e084424 100644 --- a/homeassistant/components/lyric/__init__.py +++ b/homeassistant/components/lyric/__init__.py @@ -12,7 +12,7 @@ from aiolyric import Lyric from aiolyric.exceptions import LyricAuthenticationException, LyricException from aiolyric.objects.device import LyricDevice from aiolyric.objects.location import LyricLocation -from aiolyric.objects.priority import LyricAccessories, LyricRoom +from aiolyric.objects.priority import LyricAccessory, LyricRoom from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform @@ -80,11 +80,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: await lyric.get_locations() await asyncio.gather( *( - lyric.get_thermostat_rooms(location.locationID, device.deviceID) + lyric.get_thermostat_rooms( + location.location_id, device.device_id + ) for location in lyric.locations for device in location.devices - if device.deviceClass == "Thermostat" - and device.deviceID.startswith("LCC") + if device.device_class == "Thermostat" + and device.device_id.startswith("LCC") ) ) @@ -143,7 +145,7 @@ class LyricEntity(CoordinatorEntity[DataUpdateCoordinator[Lyric]]): super().__init__(coordinator) self._key = key self._location = location - self._mac_id = device.macID + self._mac_id = device.mac_id self._update_thermostat = coordinator.data.update_thermostat self._update_fan = coordinator.data.update_fan @@ -155,7 +157,7 @@ class LyricEntity(CoordinatorEntity[DataUpdateCoordinator[Lyric]]): @property def location(self) -> LyricLocation: """Get the Lyric Location.""" - return self.coordinator.data.locations_dict[self._location.locationID] + return self.coordinator.data.locations_dict[self._location.location_id] @property def device(self) -> LyricDevice: @@ -173,7 +175,7 @@ class LyricDeviceEntity(LyricEntity): identifiers={(dr.CONNECTION_NETWORK_MAC, self._mac_id)}, connections={(dr.CONNECTION_NETWORK_MAC, self._mac_id)}, manufacturer="Honeywell", - model=self.device.deviceModel, + model=self.device.device_model, name=f"{self.device.name} Thermostat", ) @@ -187,7 +189,7 @@ class LyricAccessoryEntity(LyricDeviceEntity): location: LyricLocation, device: LyricDevice, room: LyricRoom, - accessory: LyricAccessories, + accessory: LyricAccessory, key: str, ) -> None: """Initialize the Honeywell Lyric accessory entity.""" @@ -207,7 +209,7 @@ class LyricAccessoryEntity(LyricDeviceEntity): }, manufacturer="Honeywell", model="RCHTSENSOR", - name=f"{self.room.roomName} Sensor", + name=f"{self.room.room_name} Sensor", via_device=(dr.CONNECTION_NETWORK_MAC, self._mac_id), ) @@ -217,7 +219,7 @@ class LyricAccessoryEntity(LyricDeviceEntity): return self.coordinator.data.rooms_dict[self._mac_id][self._room_id] @property - def accessory(self) -> LyricAccessories: + def accessory(self) -> LyricAccessory: """Get the Lyric Device.""" return next( accessory diff --git 
a/homeassistant/components/lyric/climate.py b/homeassistant/components/lyric/climate.py index 50add155915..1c459c2c66a 100644 --- a/homeassistant/components/lyric/climate.py +++ b/homeassistant/components/lyric/climate.py @@ -132,7 +132,7 @@ async def async_setup_entry( LyricClimate( coordinator, ClimateEntityDescription( - key=f"{device.macID}_thermostat", + key=f"{device.mac_id}_thermostat", name=device.name, ), location, @@ -185,7 +185,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): ) -> None: """Initialize Honeywell Lyric climate entity.""" # Define thermostat type (TCC - e.g., Lyric round; LCC - e.g., T5,6) - if device.changeableValues.thermostatSetpointStatus: + if device.changeable_values.thermostat_setpoint_status: self._attr_thermostat_type = LyricThermostatType.LCC else: self._attr_thermostat_type = LyricThermostatType.TCC @@ -202,15 +202,15 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): self._attr_hvac_modes = [HVACMode.OFF] # Add supported lyric thermostat features - if LYRIC_HVAC_MODE_HEAT in device.allowedModes: + if LYRIC_HVAC_MODE_HEAT in device.allowed_modes: self._attr_hvac_modes.append(HVACMode.HEAT) - if LYRIC_HVAC_MODE_COOL in device.allowedModes: + if LYRIC_HVAC_MODE_COOL in device.allowed_modes: self._attr_hvac_modes.append(HVACMode.COOL) if ( - LYRIC_HVAC_MODE_HEAT in device.allowedModes - and LYRIC_HVAC_MODE_COOL in device.allowedModes + LYRIC_HVAC_MODE_HEAT in device.allowed_modes + and LYRIC_HVAC_MODE_COOL in device.allowed_modes ): self._attr_hvac_modes.append(HVACMode.HEAT_COOL) @@ -242,19 +242,19 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): coordinator, location, device, - f"{device.macID}_thermostat", + f"{device.mac_id}_thermostat", ) self.entity_description = description @property def current_temperature(self) -> float | None: """Return the current temperature.""" - return self.device.indoorTemperature + return self.device.indoor_temperature @property def hvac_action(self) -> HVACAction | None: """Return the current hvac action.""" - action = HVAC_ACTIONS.get(self.device.operationStatus.mode, None) + action = HVAC_ACTIONS.get(self.device.operation_status.mode, None) if action == HVACAction.OFF and self.hvac_mode != HVACMode.OFF: action = HVACAction.IDLE return action @@ -262,63 +262,63 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): @property def hvac_mode(self) -> HVACMode: """Return the hvac mode.""" - return HVAC_MODES[self.device.changeableValues.mode] + return HVAC_MODES[self.device.changeable_values.mode] @property def target_temperature(self) -> float | None: """Return the temperature we try to reach.""" device = self.device if ( - device.changeableValues.autoChangeoverActive - or HVAC_MODES[device.changeableValues.mode] == HVACMode.OFF + device.changeable_values.auto_changeover_active + or HVAC_MODES[device.changeable_values.mode] == HVACMode.OFF ): return None if self.hvac_mode == HVACMode.COOL: - return device.changeableValues.coolSetpoint - return device.changeableValues.heatSetpoint + return device.changeable_values.cool_setpoint + return device.changeable_values.heat_setpoint @property def target_temperature_high(self) -> float | None: """Return the highbound target temperature we try to reach.""" device = self.device if ( - not device.changeableValues.autoChangeoverActive - or HVAC_MODES[device.changeableValues.mode] == HVACMode.OFF + not device.changeable_values.auto_changeover_active + or HVAC_MODES[device.changeable_values.mode] == HVACMode.OFF ): return None - return 
device.changeableValues.coolSetpoint + return device.changeable_values.cool_setpoint @property def target_temperature_low(self) -> float | None: """Return the lowbound target temperature we try to reach.""" device = self.device if ( - not device.changeableValues.autoChangeoverActive - or HVAC_MODES[device.changeableValues.mode] == HVACMode.OFF + not device.changeable_values.auto_changeover_active + or HVAC_MODES[device.changeable_values.mode] == HVACMode.OFF ): return None - return device.changeableValues.heatSetpoint + return device.changeable_values.heat_setpoint @property def preset_mode(self) -> str | None: """Return current preset mode.""" - return self.device.changeableValues.thermostatSetpointStatus + return self.device.changeable_values.thermostat_setpoint_status @property def min_temp(self) -> float: """Identify min_temp in Lyric API or defaults if not available.""" device = self.device - if LYRIC_HVAC_MODE_COOL in device.allowedModes: - return device.minCoolSetpoint - return device.minHeatSetpoint + if LYRIC_HVAC_MODE_COOL in device.allowed_modes: + return device.min_cool_setpoint + return device.min_heat_setpoint @property def max_temp(self) -> float: """Identify max_temp in Lyric API or defaults if not available.""" device = self.device - if LYRIC_HVAC_MODE_HEAT in device.allowedModes: - return device.maxHeatSetpoint - return device.maxCoolSetpoint + if LYRIC_HVAC_MODE_HEAT in device.allowed_modes: + return device.max_heat_setpoint + return device.max_cool_setpoint @property def fan_mode(self) -> str | None: @@ -339,7 +339,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW) target_temp_high = kwargs.get(ATTR_TARGET_TEMP_HIGH) - if device.changeableValues.mode == LYRIC_HVAC_MODE_HEAT_COOL: + if device.changeable_values.mode == LYRIC_HVAC_MODE_HEAT_COOL: if target_temp_low is None or target_temp_high is None: raise HomeAssistantError( "Could not find target_temp_low and/or target_temp_high in" @@ -349,7 +349,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): # If TCC device pass the heatCoolMode value, otherwise # if LCC device can skip the mode altogether if self._attr_thermostat_type is LyricThermostatType.TCC: - mode = HVAC_MODES[device.changeableValues.heatCoolMode] + mode = HVAC_MODES[device.changeable_values.heat_cool_mode] else: mode = None @@ -401,7 +401,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): # otherwise it turns to Auto briefly and then reverts to Off. # This is the behavior that happens with the native app as well, # so likely a bug in the api itself. - if HVAC_MODES[self.device.changeableValues.mode] == HVACMode.OFF: + if HVAC_MODES[self.device.changeable_values.mode] == HVACMode.OFF: _LOGGER.debug( "HVAC mode passed to lyric: %s", HVAC_MODES[LYRIC_HVAC_MODE_COOL], @@ -427,7 +427,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): else: _LOGGER.debug( "HVAC mode passed to lyric: %s", - HVAC_MODES[self.device.changeableValues.mode], + HVAC_MODES[self.device.changeable_values.mode], ) await self._update_thermostat( self.location, self.device, autoChangeoverActive=True @@ -448,7 +448,7 @@ class LyricClimate(LyricDeviceEntity, ClimateEntity): # otherwise leave unchanged. 
if ( LYRIC_HVAC_MODES[hvac_mode] == LYRIC_HVAC_MODE_HEAT_COOL - and not self.device.changeableValues.autoChangeoverActive + and not self.device.changeable_values.auto_changeover_active ): auto_changeover = True else: diff --git a/homeassistant/components/lyric/icons.json b/homeassistant/components/lyric/icons.json index 555215f8685..edb61c3f8e2 100644 --- a/homeassistant/components/lyric/icons.json +++ b/homeassistant/components/lyric/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "set_hold_time": "mdi:timer-pause" + "set_hold_time": { + "service": "mdi:timer-pause" + } } } diff --git a/homeassistant/components/lyric/manifest.json b/homeassistant/components/lyric/manifest.json index a55f9c1d7cb..8bed909ace2 100644 --- a/homeassistant/components/lyric/manifest.json +++ b/homeassistant/components/lyric/manifest.json @@ -22,5 +22,5 @@ "iot_class": "cloud_polling", "loggers": ["aiolyric"], "quality_scale": "silver", - "requirements": ["aiolyric==1.1.0"] + "requirements": ["aiolyric==2.0.1"] } diff --git a/homeassistant/components/lyric/sensor.py b/homeassistant/components/lyric/sensor.py index 9f05354c399..7e006bc7bfe 100644 --- a/homeassistant/components/lyric/sensor.py +++ b/homeassistant/components/lyric/sensor.py @@ -9,7 +9,7 @@ from datetime import datetime, timedelta from aiolyric import Lyric from aiolyric.objects.device import LyricDevice from aiolyric.objects.location import LyricLocation -from aiolyric.objects.priority import LyricAccessories, LyricRoom +from aiolyric.objects.priority import LyricAccessory, LyricRoom from homeassistant.components.sensor import ( SensorDeviceClass, @@ -55,8 +55,8 @@ class LyricSensorEntityDescription(SensorEntityDescription): class LyricSensorAccessoryEntityDescription(SensorEntityDescription): """Class describing Honeywell Lyric room sensor entities.""" - value_fn: Callable[[LyricRoom, LyricAccessories], StateType | datetime] - suitable_fn: Callable[[LyricRoom, LyricAccessories], bool] + value_fn: Callable[[LyricRoom, LyricAccessory], StateType | datetime] + suitable_fn: Callable[[LyricRoom, LyricAccessory], bool] DEVICE_SENSORS: list[LyricSensorEntityDescription] = [ @@ -65,8 +65,8 @@ DEVICE_SENSORS: list[LyricSensorEntityDescription] = [ translation_key="indoor_temperature", device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda device: device.indoorTemperature, - suitable_fn=lambda device: device.indoorTemperature, + value_fn=lambda device: device.indoor_temperature, + suitable_fn=lambda device: device.indoor_temperature, ), LyricSensorEntityDescription( key="indoor_humidity", @@ -74,16 +74,16 @@ DEVICE_SENSORS: list[LyricSensorEntityDescription] = [ device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, - value_fn=lambda device: device.indoorHumidity, - suitable_fn=lambda device: device.indoorHumidity, + value_fn=lambda device: device.indoor_humidity, + suitable_fn=lambda device: device.indoor_humidity, ), LyricSensorEntityDescription( key="outdoor_temperature", translation_key="outdoor_temperature", device_class=SensorDeviceClass.TEMPERATURE, state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda device: device.outdoorTemperature, - suitable_fn=lambda device: device.outdoorTemperature, + value_fn=lambda device: device.outdoor_temperature, + suitable_fn=lambda device: device.outdoor_temperature, ), LyricSensorEntityDescription( key="outdoor_humidity", @@ -91,29 +91,30 @@ DEVICE_SENSORS: list[LyricSensorEntityDescription] = [ 
device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, - value_fn=lambda device: device.displayedOutdoorHumidity, - suitable_fn=lambda device: device.displayedOutdoorHumidity, + value_fn=lambda device: device.displayed_outdoor_humidity, + suitable_fn=lambda device: device.displayed_outdoor_humidity, ), LyricSensorEntityDescription( key="next_period_time", translation_key="next_period_time", device_class=SensorDeviceClass.TIMESTAMP, value_fn=lambda device: get_datetime_from_future_time( - device.changeableValues.nextPeriodTime + device.changeable_values.next_period_time ), suitable_fn=lambda device: ( - device.changeableValues and device.changeableValues.nextPeriodTime + device.changeable_values and device.changeable_values.next_period_time ), ), LyricSensorEntityDescription( key="setpoint_status", translation_key="setpoint_status", value_fn=lambda device: get_setpoint_status( - device.changeableValues.thermostatSetpointStatus, - device.changeableValues.nextPeriodTime, + device.changeable_values.thermostat_setpoint_status, + device.changeable_values.next_period_time, ), suitable_fn=lambda device: ( - device.changeableValues and device.changeableValues.thermostatSetpointStatus + device.changeable_values + and device.changeable_values.thermostat_setpoint_status ), ), ] @@ -133,7 +134,7 @@ ACCESSORY_SENSORS: list[LyricSensorAccessoryEntityDescription] = [ device_class=SensorDeviceClass.HUMIDITY, state_class=SensorStateClass.MEASUREMENT, native_unit_of_measurement=PERCENTAGE, - value_fn=lambda room, _: room.roomAvgHumidity, + value_fn=lambda room, _: room.room_avg_humidity, suitable_fn=lambda _, accessory: accessory.type == "IndoorAirSensor", ), ] @@ -182,7 +183,7 @@ async def async_setup_entry( ) for location in coordinator.data.locations for device in location.devices - for room in coordinator.data.rooms_dict.get(device.macID, {}).values() + for room in coordinator.data.rooms_dict.get(device.mac_id, {}).values() for accessory in room.accessories for accessory_sensor in ACCESSORY_SENSORS if accessory_sensor.suitable_fn(room, accessory) @@ -206,7 +207,7 @@ class LyricSensor(LyricDeviceEntity, SensorEntity): coordinator, location, device, - f"{device.macID}_{description.key}", + f"{device.mac_id}_{description.key}", ) self.entity_description = description if description.device_class == SensorDeviceClass.TEMPERATURE: @@ -233,7 +234,7 @@ class LyricAccessorySensor(LyricAccessoryEntity, SensorEntity): location: LyricLocation, parentDevice: LyricDevice, room: LyricRoom, - accessory: LyricAccessories, + accessory: LyricAccessory, ) -> None: """Initialize.""" super().__init__( @@ -242,7 +243,7 @@ class LyricAccessorySensor(LyricAccessoryEntity, SensorEntity): parentDevice, room, accessory, - f"{parentDevice.macID}_room{room.id}_acc{accessory.id}_{description.key}", + f"{parentDevice.mac_id}_room{room.id}_acc{accessory.id}_{description.key}", ) self.entity_description = description if description.device_class == SensorDeviceClass.TEMPERATURE: diff --git a/homeassistant/components/madvr/config_flow.py b/homeassistant/components/madvr/config_flow.py index cf43e03a68b..1ca1dd296d8 100644 --- a/homeassistant/components/madvr/config_flow.py +++ b/homeassistant/components/madvr/config_flow.py @@ -8,8 +8,9 @@ import aiohttp from madvr.madvr import HeartBeatError, Madvr import voluptuous as vol -from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.config_entries import ConfigEntry, ConfigFlow, 
ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant from .const import DEFAULT_NAME, DEFAULT_PORT, DOMAIN from .errors import CannotConnect @@ -18,13 +19,8 @@ _LOGGER = logging.getLogger(__name__) STEP_USER_DATA_SCHEMA = vol.Schema( { - vol.Required( - CONF_HOST, - ): str, - vol.Required( - CONF_PORT, - default=DEFAULT_PORT, - ): int, + vol.Required(CONF_HOST): str, + vol.Required(CONF_PORT, default=DEFAULT_PORT): int, } ) @@ -36,81 +32,120 @@ class MadVRConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 + entry: ConfigEntry | None = None + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: """Handle the initial step.""" + return await self._handle_config_step(user_input) + + async def async_step_reconfigure( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle reconfiguration of the device.""" + self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + return await self.async_step_reconfigure_confirm(user_input) + + async def async_step_reconfigure_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a reconfiguration flow initialized by the user.""" + return await self._handle_config_step(user_input, step_id="reconfigure") + + async def _handle_config_step( + self, user_input: dict[str, Any] | None = None, step_id: str = "user" + ) -> ConfigFlowResult: + """Handle the configuration step for both initial setup and reconfiguration.""" errors: dict[str, str] = {} if user_input is not None: + _LOGGER.debug("User input: %s", user_input) host = user_input[CONF_HOST] port = user_input[CONF_PORT] try: - # ensure we can connect and get the mac address from device - mac = await self._test_connection(host, port) + mac = await test_connection(self.hass, host, port) except CannotConnect: _LOGGER.error("CannotConnect error caught") errors["base"] = "cannot_connect" else: if not mac: errors["base"] = "no_mac" - if not errors: - _LOGGER.debug("MAC address found: %s", mac) - # this will prevent the user from adding the same device twice and persist the mac address - await self.async_set_unique_id(mac) - self._abort_if_unique_id_configured() + else: + _LOGGER.debug("MAC address found: %s", mac) + # abort if the detected mac differs from the one in the entry + if self.entry: + existing_mac = self.entry.unique_id + if existing_mac != mac: + _LOGGER.debug( + "MAC address changed from %s to %s", existing_mac, mac + ) + # abort + return self.async_abort(reason="set_up_new_device") - # create the entry - return self.async_create_entry( - title=DEFAULT_NAME, - data=user_input, - ) + _LOGGER.debug("Reconfiguration done") + return self.async_update_reload_and_abort( + entry=self.entry, + data={**user_input, CONF_HOST: host, CONF_PORT: port}, + reason="reconfigure_successful", + ) + # abort if already configured with same mac + await self.async_set_unique_id(mac) + self._abort_if_unique_id_configured(updates={CONF_HOST: host}) - # this will show the form or allow the user to retry if there was an error + _LOGGER.debug("Configuration successful") + return self.async_create_entry( + title=DEFAULT_NAME, + data=user_input, + ) + _LOGGER.debug("Showing form with errors: %s", errors) return self.async_show_form( - step_id="user", + step_id=step_id, data_schema=self.add_suggested_values_to_schema( STEP_USER_DATA_SCHEMA, user_input ), errors=errors, ) - async def _test_connection(self, host: str, port: int) -> str: - 
"""Test if we can connect to the device and grab the mac.""" - madvr_client = Madvr(host=host, port=port, loop=self.hass.loop) - _LOGGER.debug("Testing connection to madVR at %s:%s", host, port) - # try to connect - try: - await asyncio.wait_for(madvr_client.open_connection(), timeout=15) - # connection can raise HeartBeatError if the device is not available or connection does not work - except (TimeoutError, aiohttp.ClientError, OSError, HeartBeatError) as err: - _LOGGER.error("Error connecting to madVR: %s", err) - raise CannotConnect from err - # check if we are connected - if not madvr_client.connected: - raise CannotConnect("Connection failed") +async def test_connection(hass: HomeAssistant, host: str, port: int) -> str: + """Test if we can connect to the device and grab the mac.""" + madvr_client = Madvr(host=host, port=port, loop=hass.loop) + _LOGGER.debug("Testing connection to madVR at %s:%s", host, port) + # try to connect + try: + await asyncio.wait_for(madvr_client.open_connection(), timeout=15) + # connection can raise HeartBeatError if the device is not available or connection does not work + except (TimeoutError, aiohttp.ClientError, OSError, HeartBeatError) as err: + _LOGGER.error("Error connecting to madVR: %s", err) + raise CannotConnect from err - # background tasks needed to capture realtime info - await madvr_client.async_add_tasks() + # check if we are connected + if not madvr_client.connected: + raise CannotConnect("Connection failed") - # wait for client to capture device info - retry_time = 15 - while not madvr_client.mac_address and retry_time > 0: - await asyncio.sleep(RETRY_INTERVAL) - retry_time -= 1 + # background tasks needed to capture realtime info + await madvr_client.async_add_tasks() - mac_address = madvr_client.mac_address - if mac_address: - _LOGGER.debug("Connected to madVR with MAC: %s", mac_address) - # close this connection because this client object will not be reused - await self._close_test_connection(madvr_client) - _LOGGER.debug("Connection test successful") - return mac_address + # wait for client to capture device info + retry_time = 15 + while not madvr_client.mac_address and retry_time > 0: + await asyncio.sleep(RETRY_INTERVAL) + retry_time -= 1 - async def _close_test_connection(self, madvr_client: Madvr) -> None: - """Close the test connection.""" - madvr_client.stop() - await madvr_client.async_cancel_tasks() - await madvr_client.close_connection() + mac_address = madvr_client.mac_address + if mac_address: + _LOGGER.debug("Connected to madVR with MAC: %s", mac_address) + # close this connection because this client object will not be reused + await close_test_connection(madvr_client) + _LOGGER.debug("Connection test successful") + return mac_address + + +async def close_test_connection(madvr_client: Madvr) -> None: + """Close the test connection.""" + _LOGGER.debug("Closing test connection") + madvr_client.stop() + await madvr_client.async_cancel_tasks() + await madvr_client.close_connection() diff --git a/homeassistant/components/madvr/manifest.json b/homeassistant/components/madvr/manifest.json index ce6336acabc..0ac906fdbef 100644 --- a/homeassistant/components/madvr/manifest.json +++ b/homeassistant/components/madvr/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/madvr", "integration_type": "device", "iot_class": "local_push", - "requirements": ["py-madvr2==1.6.29"] + "requirements": ["py-madvr2==1.6.32"] } diff --git a/homeassistant/components/madvr/strings.json 
b/homeassistant/components/madvr/strings.json index 3e8e786f775..b8d30be23aa 100644 --- a/homeassistant/components/madvr/strings.json +++ b/homeassistant/components/madvr/strings.json @@ -3,7 +3,19 @@ "step": { "user": { "title": "Setup madVR Envy", - "description": "Your device needs to be turned in order to add the integation. ", + "description": "Your device needs to be on in order to add the integration.", + "data": { + "host": "[%key:common::config_flow::data::host%]", + "port": "[%key:common::config_flow::data::port%]" + }, + "data_description": { + "host": "The hostname or IP address of your madVR Envy device.", + "port": "The port your madVR Envy is listening on. In 99% of cases, leave this as the default." + } + }, + "reconfigure": { + "title": "Reconfigure madVR Envy", + "description": "Your device needs to be on in order to reconfigure the integration.", "data": { "host": "[%key:common::config_flow::data::host%]", "port": "[%key:common::config_flow::data::port%]" @@ -15,11 +27,13 @@ } }, "abort": { - "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]", + "reconfigure_successful": "[%key:common::config_flow::abort::reconfigure_successful%]" }, "error": { "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", - "no_mac": "A MAC address was not found. It required to identify the device. Please ensure your device is connectable." + "no_mac": "A MAC address was not found. It is required to identify the device. Please ensure your device is connectable.", + "set_up_new_device": "A new device was detected. Please set it up as a new entity instead of reconfiguring." } }, "entity": { diff --git a/homeassistant/components/mailbox/__init__.py b/homeassistant/components/mailbox/__init__.py deleted file mode 100644 index b446ba3704e..00000000000 --- a/homeassistant/components/mailbox/__init__.py +++ /dev/null @@ -1,291 +0,0 @@ -"""Support for Voice mailboxes.""" - -from __future__ import annotations - -import asyncio -from contextlib import suppress -from datetime import timedelta -from http import HTTPStatus -import logging -from typing import Any, Final - -from aiohttp import web -from aiohttp.web_exceptions import HTTPNotFound - -from homeassistant.components import frontend -from homeassistant.components.http import HomeAssistantView -from homeassistant.config import config_per_platform -from homeassistant.core import Event, HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import config_validation as cv, discovery -from homeassistant.helpers.entity import Entity -from homeassistant.helpers.entity_component import EntityComponent -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType -from homeassistant.setup import async_prepare_setup_platform - -_LOGGER = logging.getLogger(__name__) - -DOMAIN: Final = "mailbox" - -EVENT: Final = "mailbox_updated" -CONTENT_TYPE_MPEG: Final = "audio/mpeg" -CONTENT_TYPE_NONE: Final = "none" - -SCAN_INTERVAL = timedelta(seconds=30) - -CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Track states and offer events for mailboxes.""" - mailboxes: list[Mailbox] = [] - frontend.async_register_built_in_panel(hass, "mailbox", "mailbox", "mdi:mailbox") -
hass.http.register_view(MailboxPlatformsView(mailboxes)) - hass.http.register_view(MailboxMessageView(mailboxes)) - hass.http.register_view(MailboxMediaView(mailboxes)) - hass.http.register_view(MailboxDeleteView(mailboxes)) - - async def async_setup_platform( - p_type: str, - p_config: ConfigType | None = None, - discovery_info: DiscoveryInfoType | None = None, - ) -> None: - """Set up a mailbox platform.""" - if p_config is None: - p_config = {} - if discovery_info is None: - discovery_info = {} - - platform = await async_prepare_setup_platform(hass, config, DOMAIN, p_type) - - if platform is None: - _LOGGER.error("Unknown mailbox platform specified") - return - - if p_type not in ["asterisk_cdr", "asterisk_mbox", "demo"]: - # Asterisk integration will raise a repair issue themselves - # For demo we don't create one - async_create_issue( - hass, - DOMAIN, - f"deprecated_mailbox_{p_type}", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_mailbox_integration", - translation_placeholders={ - "integration_domain": p_type, - }, - ) - - _LOGGER.info("Setting up %s.%s", DOMAIN, p_type) - mailbox = None - try: - if hasattr(platform, "async_get_handler"): - mailbox = await platform.async_get_handler( - hass, p_config, discovery_info - ) - elif hasattr(platform, "get_handler"): - mailbox = await hass.async_add_executor_job( - platform.get_handler, hass, p_config, discovery_info - ) - else: - raise HomeAssistantError("Invalid mailbox platform.") - - if mailbox is None: - _LOGGER.error("Failed to initialize mailbox platform %s", p_type) - return - - except Exception: - _LOGGER.exception("Error setting up platform %s", p_type) - return - - mailboxes.append(mailbox) - mailbox_entity = MailboxEntity(mailbox) - component = EntityComponent[MailboxEntity]( - logging.getLogger(__name__), DOMAIN, hass, SCAN_INTERVAL - ) - component.register_shutdown() - await component.async_add_entities([mailbox_entity]) - - for p_type, p_config in config_per_platform(config, DOMAIN): - if p_type is not None: - hass.async_create_task( - async_setup_platform(p_type, p_config), eager_start=True - ) - - async def async_platform_discovered( - platform: str, info: DiscoveryInfoType | None - ) -> None: - """Handle for discovered platform.""" - await async_setup_platform(platform, discovery_info=info) - - discovery.async_listen_platform(hass, DOMAIN, async_platform_discovered) - - return True - - -class MailboxEntity(Entity): - """Entity for each mailbox platform to provide a badge display.""" - - def __init__(self, mailbox: Mailbox) -> None: - """Initialize mailbox entity.""" - self.mailbox = mailbox - self.message_count = 0 - - async def async_added_to_hass(self) -> None: - """Complete entity initialization.""" - - @callback - def _mailbox_updated(event: Event) -> None: - self.async_schedule_update_ha_state(True) - - self.hass.bus.async_listen(EVENT, _mailbox_updated) - self.async_schedule_update_ha_state(True) - - @property - def state(self) -> str: - """Return the state of the binary sensor.""" - return str(self.message_count) - - @property - def name(self) -> str: - """Return the name of the entity.""" - return self.mailbox.name - - async def async_update(self) -> None: - """Retrieve messages from platform.""" - messages = await self.mailbox.async_get_messages() - self.message_count = len(messages) - - -class Mailbox: - """Represent a mailbox device.""" - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize 
mailbox object.""" - self.hass = hass - self.name = name - - @callback - def async_update(self) -> None: - """Send event notification of updated mailbox.""" - self.hass.bus.async_fire(EVENT) - - @property - def media_type(self) -> str: - """Return the supported media type.""" - raise NotImplementedError - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return False - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return False - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - raise NotImplementedError - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - raise NotImplementedError - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - raise NotImplementedError - - -class StreamError(Exception): - """Media streaming exception.""" - - -class MailboxView(HomeAssistantView): - """Base mailbox view.""" - - def __init__(self, mailboxes: list[Mailbox]) -> None: - """Initialize a basic mailbox view.""" - self.mailboxes = mailboxes - - def get_mailbox(self, platform: str) -> Mailbox: - """Retrieve the specified mailbox.""" - for mailbox in self.mailboxes: - if mailbox.name == platform: - return mailbox - raise HTTPNotFound - - -class MailboxPlatformsView(MailboxView): - """View to return the list of mailbox platforms.""" - - url = "/api/mailbox/platforms" - name = "api:mailbox:platforms" - - async def get(self, request: web.Request) -> web.Response: - """Retrieve list of platforms.""" - return self.json( - [ - { - "name": mailbox.name, - "has_media": mailbox.has_media, - "can_delete": mailbox.can_delete, - } - for mailbox in self.mailboxes - ] - ) - - -class MailboxMessageView(MailboxView): - """View to return the list of messages.""" - - url = "/api/mailbox/messages/{platform}" - name = "api:mailbox:messages" - - async def get(self, request: web.Request, platform: str) -> web.Response: - """Retrieve messages.""" - mailbox = self.get_mailbox(platform) - messages = await mailbox.async_get_messages() - return self.json(messages) - - -class MailboxDeleteView(MailboxView): - """View to delete selected messages.""" - - url = "/api/mailbox/delete/{platform}/{msgid}" - name = "api:mailbox:delete" - - async def delete(self, request: web.Request, platform: str, msgid: str) -> None: - """Delete items.""" - mailbox = self.get_mailbox(platform) - await mailbox.async_delete(msgid) - - -class MailboxMediaView(MailboxView): - """View to return a media file.""" - - url = r"/api/mailbox/media/{platform}/{msgid}" - name = "api:asteriskmbox:media" - - async def get( - self, request: web.Request, platform: str, msgid: str - ) -> web.Response: - """Retrieve media.""" - mailbox = self.get_mailbox(platform) - - with suppress(asyncio.CancelledError, TimeoutError): - async with asyncio.timeout(10): - try: - stream = await mailbox.async_get_media(msgid) - except StreamError as err: - _LOGGER.error("Error getting media: %s", err) - return web.Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) - if stream: - return web.Response(body=stream, content_type=mailbox.media_type) - - return web.Response(status=HTTPStatus.INTERNAL_SERVER_ERROR) diff --git a/homeassistant/components/mailbox/manifest.json b/homeassistant/components/mailbox/manifest.json deleted file mode 100644 index 43dd133654c..00000000000 --- a/homeassistant/components/mailbox/manifest.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - 
"domain": "mailbox", - "name": "Mailbox", - "codeowners": [], - "dependencies": ["http"], - "documentation": "https://www.home-assistant.io/integrations/mailbox", - "integration_type": "entity", - "quality_scale": "internal" -} diff --git a/homeassistant/components/mailbox/strings.json b/homeassistant/components/mailbox/strings.json deleted file mode 100644 index 01746e3e98d..00000000000 --- a/homeassistant/components/mailbox/strings.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "title": "Mailbox", - "issues": { - "deprecated_mailbox": { - "title": "The mailbox platform is being removed", - "description": "The mailbox platform is being removed. Please report it to the author of the \"{integration_domain}\" custom integration." - } - } -} diff --git a/homeassistant/components/manual/alarm_control_panel.py b/homeassistant/components/manual/alarm_control_panel.py index 5b344dd01ac..c1910d0dfa1 100644 --- a/homeassistant/components/manual/alarm_control_panel.py +++ b/homeassistant/components/manual/alarm_control_panel.py @@ -3,12 +3,12 @@ from __future__ import annotations import datetime -import logging from typing import Any import voluptuous as vol from homeassistant.components.alarm_control_panel import ( + PLATFORM_SCHEMA as ALARM_CONTROL_PANEL_PLATFORM_SCHEMA, AlarmControlPanelEntity, AlarmControlPanelEntityFeature, CodeFormat, @@ -19,8 +19,8 @@ from homeassistant.const import ( CONF_DELAY_TIME, CONF_DISARM_AFTER_TRIGGER, CONF_NAME, - CONF_PLATFORM, CONF_TRIGGER_TIME, + CONF_UNIQUE_ID, STATE_ALARM_ARMED_AWAY, STATE_ALARM_ARMED_CUSTOM_BYPASS, STATE_ALARM_ARMED_HOME, @@ -32,15 +32,16 @@ from homeassistant.const import ( STATE_ALARM_TRIGGERED, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceValidationError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_track_point_in_time from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util -_LOGGER = logging.getLogger(__name__) +DOMAIN = "manual" CONF_ARMING_STATES = "arming_states" CONF_CODE_TEMPLATE = "code_template" @@ -84,7 +85,7 @@ ATTR_PREVIOUS_STATE = "previous_state" ATTR_NEXT_STATE = "next_state" -def _state_validator(config): +def _state_validator(config: dict[str, Any]) -> dict[str, Any]: """Validate the state.""" for state in SUPPORTED_PRETRIGGER_STATES: if CONF_DELAY_TIME not in config[state]: @@ -100,7 +101,7 @@ def _state_validator(config): return config -def _state_schema(state): +def _state_schema(state: str) -> vol.Schema: """Validate the state.""" schema = {} if state in SUPPORTED_PRETRIGGER_STATES: @@ -119,70 +120,73 @@ def _state_schema(state): PLATFORM_SCHEMA = vol.Schema( vol.All( - { - vol.Required(CONF_PLATFORM): "manual", - vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string, - vol.Exclusive(CONF_CODE, "code validation"): cv.string, - vol.Exclusive(CONF_CODE_TEMPLATE, "code validation"): cv.template, - vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean, - vol.Optional(CONF_DELAY_TIME, default=DEFAULT_DELAY_TIME): vol.All( - cv.time_period, cv.positive_timedelta - ), - vol.Optional(CONF_ARMING_TIME, default=DEFAULT_ARMING_TIME): vol.All( - cv.time_period, cv.positive_timedelta - ), - vol.Optional(CONF_TRIGGER_TIME, 
default=DEFAULT_TRIGGER_TIME): vol.All( - cv.time_period, cv.positive_timedelta - ), - vol.Optional( - CONF_DISARM_AFTER_TRIGGER, default=DEFAULT_DISARM_AFTER_TRIGGER - ): cv.boolean, - vol.Optional(CONF_ARMING_STATES, default=SUPPORTED_ARMING_STATES): vol.All( - cv.ensure_list, [vol.In(SUPPORTED_ARMING_STATES)] - ), - vol.Optional(STATE_ALARM_ARMED_AWAY, default={}): _state_schema( - STATE_ALARM_ARMED_AWAY - ), - vol.Optional(STATE_ALARM_ARMED_HOME, default={}): _state_schema( - STATE_ALARM_ARMED_HOME - ), - vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}): _state_schema( - STATE_ALARM_ARMED_NIGHT - ), - vol.Optional(STATE_ALARM_ARMED_VACATION, default={}): _state_schema( - STATE_ALARM_ARMED_VACATION - ), - vol.Optional(STATE_ALARM_ARMED_CUSTOM_BYPASS, default={}): _state_schema( - STATE_ALARM_ARMED_CUSTOM_BYPASS - ), - vol.Optional(STATE_ALARM_DISARMED, default={}): _state_schema( - STATE_ALARM_DISARMED - ), - vol.Optional(STATE_ALARM_TRIGGERED, default={}): _state_schema( - STATE_ALARM_TRIGGERED - ), - }, + ALARM_CONTROL_PANEL_PLATFORM_SCHEMA.extend( + { + vol.Optional(CONF_NAME, default=DEFAULT_ALARM_NAME): cv.string, + vol.Optional(CONF_UNIQUE_ID): cv.string, + vol.Exclusive(CONF_CODE, "code validation"): cv.string, + vol.Exclusive(CONF_CODE_TEMPLATE, "code validation"): cv.template, + vol.Optional(CONF_CODE_ARM_REQUIRED, default=True): cv.boolean, + vol.Optional(CONF_DELAY_TIME, default=DEFAULT_DELAY_TIME): vol.All( + cv.time_period, cv.positive_timedelta + ), + vol.Optional(CONF_ARMING_TIME, default=DEFAULT_ARMING_TIME): vol.All( + cv.time_period, cv.positive_timedelta + ), + vol.Optional(CONF_TRIGGER_TIME, default=DEFAULT_TRIGGER_TIME): vol.All( + cv.time_period, cv.positive_timedelta + ), + vol.Optional( + CONF_DISARM_AFTER_TRIGGER, default=DEFAULT_DISARM_AFTER_TRIGGER + ): cv.boolean, + vol.Optional( + CONF_ARMING_STATES, default=SUPPORTED_ARMING_STATES + ): vol.All(cv.ensure_list, [vol.In(SUPPORTED_ARMING_STATES)]), + vol.Optional(STATE_ALARM_ARMED_AWAY, default={}): _state_schema( + STATE_ALARM_ARMED_AWAY + ), + vol.Optional(STATE_ALARM_ARMED_HOME, default={}): _state_schema( + STATE_ALARM_ARMED_HOME + ), + vol.Optional(STATE_ALARM_ARMED_NIGHT, default={}): _state_schema( + STATE_ALARM_ARMED_NIGHT + ), + vol.Optional(STATE_ALARM_ARMED_VACATION, default={}): _state_schema( + STATE_ALARM_ARMED_VACATION + ), + vol.Optional( + STATE_ALARM_ARMED_CUSTOM_BYPASS, default={} + ): _state_schema(STATE_ALARM_ARMED_CUSTOM_BYPASS), + vol.Optional(STATE_ALARM_DISARMED, default={}): _state_schema( + STATE_ALARM_DISARMED + ), + vol.Optional(STATE_ALARM_TRIGGERED, default={}): _state_schema( + STATE_ALARM_TRIGGERED + ), + }, + ), _state_validator, ) ) -def setup_platform( +async def async_setup_platform( hass: HomeAssistant, config: ConfigType, - add_entities: AddEntitiesCallback, + async_add_entities: AddEntitiesCallback, discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the manual alarm platform.""" - add_entities( + async_add_entities( [ ManualAlarm( hass, config[CONF_NAME], + config.get(CONF_UNIQUE_ID), config.get(CONF_CODE), config.get(CONF_CODE_TEMPLATE), - config.get(CONF_CODE_ARM_REQUIRED), - config.get(CONF_DISARM_AFTER_TRIGGER, DEFAULT_DISARM_AFTER_TRIGGER), + config[CONF_CODE_ARM_REQUIRED], + config[CONF_DISARM_AFTER_TRIGGER], config, ) ] @@ -203,27 +207,25 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): def __init__( self, - hass, - name, - code, - code_template, - code_arm_required, - disarm_after_trigger, - config, - ): + hass: HomeAssistant, + 
name: str, + unique_id: str | None, + code: str | None, + code_template: Template | None, + code_arm_required: bool, + disarm_after_trigger: bool, + config: dict[str, Any], + ) -> None: """Init the manual alarm panel.""" self._state = STATE_ALARM_DISARMED self._hass = hass self._attr_name = name - if code_template: - self._code = code_template - self._code.hass = hass - else: - self._code = code or None + self._attr_unique_id = unique_id + self._code = code_template or code or None self._attr_code_arm_required = code_arm_required self._disarm_after_trigger = disarm_after_trigger self._previous_state = self._state - self._state_ts = None + self._state_ts: datetime.datetime = dt_util.utcnow() self._delay_time_by_state = { state: config[state][CONF_DELAY_TIME] @@ -249,7 +251,9 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): if self._state == STATE_ALARM_TRIGGERED: if self._within_pending_time(self._state): return STATE_ALARM_PENDING - trigger_time = self._trigger_time_by_state[self._previous_state] + trigger_time: datetime.timedelta = self._trigger_time_by_state[ + self._previous_state + ] if ( self._state_ts + self._pending_time(self._state) + trigger_time ) < dt_util.utcnow(): @@ -266,25 +270,27 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): return self._state @property - def _active_state(self): + def _active_state(self) -> str: """Get the current state.""" if self.state in (STATE_ALARM_PENDING, STATE_ALARM_ARMING): return self._previous_state return self._state - def _arming_time(self, state): + def _arming_time(self, state: str) -> datetime.timedelta: """Get the arming time.""" - return self._arming_time_by_state[state] + arming_time: datetime.timedelta = self._arming_time_by_state[state] + return arming_time - def _pending_time(self, state): + def _pending_time(self, state: str) -> datetime.timedelta: """Get the pending time.""" - return self._delay_time_by_state[self._previous_state] + delay_time: datetime.timedelta = self._delay_time_by_state[self._previous_state] + return delay_time - def _within_arming_time(self, state): + def _within_arming_time(self, state: str) -> bool: """Get if the action is in the arming time window.""" return self._state_ts + self._arming_time(state) > dt_util.utcnow() - def _within_pending_time(self, state): + def _within_pending_time(self, state: str) -> bool: """Get if the action is in the pending time window.""" return self._state_ts + self._pending_time(state) > dt_util.utcnow() @@ -373,7 +379,7 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): self._state_ts + arming_time, ) - def _async_validate_code(self, code, state): + def _async_validate_code(self, code: str | None, state: str) -> None: """Validate given code.""" if ( state != STATE_ALARM_DISARMED and not self.code_arm_required @@ -390,24 +396,28 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): if not alarm_code or code == alarm_code: return - raise HomeAssistantError("Invalid alarm code provided") + raise ServiceValidationError( + "Invalid alarm code provided", + translation_domain=DOMAIN, + translation_key="invalid_code", + ) @property def extra_state_attributes(self) -> dict[str, Any]: """Return the state attributes.""" if self.state in (STATE_ALARM_PENDING, STATE_ALARM_ARMING): - return { - ATTR_PREVIOUS_STATE: self._previous_state, - ATTR_NEXT_STATE: self._state, - } - if self.state == STATE_ALARM_TRIGGERED: - return { - ATTR_PREVIOUS_STATE: self._previous_state, - } - return {} + prev_state: str | None = self._previous_state + state: str | 
None = self._state + elif self.state == STATE_ALARM_TRIGGERED: + prev_state = self._previous_state + state = None + else: + prev_state = None + state = None + return {ATTR_PREVIOUS_STATE: prev_state, ATTR_NEXT_STATE: state} @callback - def async_scheduled_update(self, now): + def async_scheduled_update(self, now: datetime.datetime) -> None: """Update state at a scheduled point in time.""" self.async_write_ha_state() @@ -416,13 +426,13 @@ class ManualAlarm(AlarmControlPanelEntity, RestoreEntity): await super().async_added_to_hass() if state := await self.async_get_last_state(): self._state_ts = state.last_updated - if hasattr(state, "attributes") and ATTR_NEXT_STATE in state.attributes: + if next_state := state.attributes.get(ATTR_NEXT_STATE): # If in arming or pending state we record the transition, # not the current state - self._state = state.attributes[ATTR_NEXT_STATE] + self._state = next_state else: self._state = state.state - if hasattr(state, "attributes") and ATTR_PREVIOUS_STATE in state.attributes: - self._previous_state = state.attributes[ATTR_PREVIOUS_STATE] + if prev_state := state.attributes.get(ATTR_PREVIOUS_STATE): + self._previous_state = prev_state self._async_set_state_update_events() diff --git a/homeassistant/components/manual/manifest.json b/homeassistant/components/manual/manifest.json index 7406ab26830..37ba45c2dda 100644 --- a/homeassistant/components/manual/manifest.json +++ b/homeassistant/components/manual/manifest.json @@ -3,6 +3,7 @@ "name": "Manual Alarm Control Panel", "codeowners": [], "documentation": "https://www.home-assistant.io/integrations/manual", + "integration_type": "helper", "iot_class": "calculated", "quality_scale": "internal" } diff --git a/homeassistant/components/manual/strings.json b/homeassistant/components/manual/strings.json new file mode 100644 index 00000000000..f26a1570d05 --- /dev/null +++ b/homeassistant/components/manual/strings.json @@ -0,0 +1,7 @@ +{ + "exceptions": { + "invalid_code": { + "message": "Invalid alarm code provided" + } + } +} diff --git a/homeassistant/components/manual_mqtt/alarm_control_panel.py b/homeassistant/components/manual_mqtt/alarm_control_panel.py index 26946a2a45c..8d447bbc8ac 100644 --- a/homeassistant/components/manual_mqtt/alarm_control_panel.py +++ b/homeassistant/components/manual_mqtt/alarm_control_panel.py @@ -273,7 +273,6 @@ class ManualMQTTAlarm(AlarmControlPanelEntity): self._attr_name = name if code_template: self._code = code_template - self._code.hass = hass else: self._code = code or None self._disarm_after_trigger = disarm_after_trigger diff --git a/homeassistant/components/mastodon/__init__.py b/homeassistant/components/mastodon/__init__.py index 2fe379702ee..e8d23434248 100644 --- a/homeassistant/components/mastodon/__init__.py +++ b/homeassistant/components/mastodon/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +from dataclasses import dataclass + from mastodon.Mastodon import Mastodon, MastodonError from homeassistant.config_entries import ConfigEntry @@ -15,16 +17,33 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import discovery +from homeassistant.util import slugify -from .const import CONF_BASE_URL, DOMAIN -from .utils import create_mastodon_client +from .const import CONF_BASE_URL, DOMAIN, LOGGER +from .coordinator import MastodonCoordinator +from .utils import construct_mastodon_username, create_mastodon_client + +PLATFORMS: list[Platform] = 
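The manual alarm hunks above replace the bare HomeAssistantError with a ServiceValidationError that carries translation_domain and translation_key, while the new strings.json supplies the user-facing message under "exceptions". A minimal sketch of how the two halves pair up, using the "manual" domain and "invalid_code" key from the hunks and otherwise hypothetical names:

from homeassistant.exceptions import ServiceValidationError

DOMAIN = "manual"  # translation_domain must match the integration domain


def validate_code(code: str | None, expected: str | None) -> None:
    """Raise a translatable validation error when the code does not match."""
    if expected and code != expected:
        # "invalid_code" resolves to exceptions.invalid_code.message in strings.json
        raise ServiceValidationError(
            "Invalid alarm code provided",
            translation_domain=DOMAIN,
            translation_key="invalid_code",
        )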
[Platform.NOTIFY, Platform.SENSOR] -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +@dataclass +class MastodonData: + """Mastodon data type.""" + + client: Mastodon + instance: dict + account: dict + coordinator: MastodonCoordinator + + +type MastodonConfigEntry = ConfigEntry[MastodonData] + + +async def async_setup_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -> bool: """Set up Mastodon from a config entry.""" try: - client, _, _ = await hass.async_add_executor_job( + client, instance, account = await hass.async_add_executor_job( setup_mastodon, entry, ) @@ -34,6 +53,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: assert entry.unique_id + coordinator = MastodonCoordinator(hass, client) + + await coordinator.async_config_entry_first_refresh() + + entry.runtime_data = MastodonData(client, instance, account, coordinator) + await discovery.async_load_platform( hass, Platform.NOTIFY, @@ -42,6 +67,49 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: {}, ) + await hass.config_entries.async_forward_entry_setups( + entry, [platform for platform in PLATFORMS if platform != Platform.NOTIFY] + ) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: MastodonConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms( + entry, [platform for platform in PLATFORMS if platform != Platform.NOTIFY] + ) + + +async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: + """Migrate old config.""" + + if entry.version == 1 and entry.minor_version == 1: + # Version 1.1 had the unique_id as client_id, this isn't necessarily unique + LOGGER.debug("Migrating config entry from version %s", entry.version) + + try: + _, instance, account = await hass.async_add_executor_job( + setup_mastodon, + entry, + ) + except MastodonError as ex: + LOGGER.error("Migration failed with error %s", ex) + return False + + hass.config_entries.async_update_entry( + entry, + minor_version=2, + unique_id=slugify(construct_mastodon_username(instance, account)), + ) + + LOGGER.debug( + "Entry %s successfully migrated to version %s.%s", + entry.entry_id, + entry.version, + entry.minor_version, + ) + return True diff --git a/homeassistant/components/mastodon/config_flow.py b/homeassistant/components/mastodon/config_flow.py index 7d1c9396cbb..5c9419cd12d 100644 --- a/homeassistant/components/mastodon/config_flow.py +++ b/homeassistant/components/mastodon/config_flow.py @@ -19,7 +19,7 @@ from homeassistant.helpers.selector import ( TextSelectorConfig, TextSelectorType, ) -from homeassistant.helpers.typing import ConfigType +from homeassistant.util import slugify from .const import CONF_BASE_URL, DEFAULT_URL, DOMAIN, LOGGER from .utils import construct_mastodon_username, create_mastodon_client @@ -47,6 +47,7 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 + MINOR_VERSION = 2 config_entry: ConfigEntry def check_connection( @@ -105,10 +106,6 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initialized by the user.""" errors: dict[str, str] | None = None if user_input: - self._async_abort_entries_match( - {CONF_CLIENT_ID: user_input[CONF_CLIENT_ID]} - ) - instance, account, errors = await self.hass.async_add_executor_job( self.check_connection, user_input[CONF_BASE_URL], @@ -119,7 +116,8 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): if not errors: name = 
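The Mastodon setup above follows the runtime_data pattern: per-entry objects live in a small dataclass stored on the entry, and a typed ConfigEntry alias lets every platform read them without hass.data bookkeeping. A condensed sketch of that shape (field types simplified, names hypothetical):

from dataclasses import dataclass
from typing import Any

from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant


@dataclass
class ExampleData:
    """Objects shared by all platforms of one config entry."""

    client: Any
    account: dict[str, Any]


type ExampleConfigEntry = ConfigEntry[ExampleData]  # entry.runtime_data is now typed


async def async_setup_entry(hass: HomeAssistant, entry: ExampleConfigEntry) -> bool:
    """Store the per-entry objects once; platforms read entry.runtime_data."""
    entry.runtime_data = ExampleData(client=None, account={})
    return True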
construct_mastodon_username(instance, account) - await self.async_set_unique_id(user_input[CONF_CLIENT_ID]) + await self.async_set_unique_id(slugify(name)) + self._abort_if_unique_id_configured() return self.async_create_entry( title=name, data=user_input, @@ -127,17 +125,17 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): return self.show_user_form(user_input, errors) - async def async_step_import(self, import_config: ConfigType) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" errors: dict[str, str] | None = None LOGGER.debug("Importing Mastodon from configuration.yaml") - base_url = str(import_config.get(CONF_BASE_URL, DEFAULT_URL)) - client_id = str(import_config.get(CONF_CLIENT_ID)) - client_secret = str(import_config.get(CONF_CLIENT_SECRET)) - access_token = str(import_config.get(CONF_ACCESS_TOKEN)) - name = import_config.get(CONF_NAME, None) + base_url = str(import_data.get(CONF_BASE_URL, DEFAULT_URL)) + client_id = str(import_data.get(CONF_CLIENT_ID)) + client_secret = str(import_data.get(CONF_CLIENT_SECRET)) + access_token = str(import_data.get(CONF_ACCESS_TOKEN)) + name = import_data.get(CONF_NAME) instance, account, errors = await self.hass.async_add_executor_job( self.check_connection, @@ -148,7 +146,8 @@ class MastodonConfigFlow(ConfigFlow, domain=DOMAIN): ) if not errors: - await self.async_set_unique_id(client_id) + name = construct_mastodon_username(instance, account) + await self.async_set_unique_id(slugify(name)) self._abort_if_unique_id_configured() if not name: diff --git a/homeassistant/components/mastodon/const.py b/homeassistant/components/mastodon/const.py index 3a9cf7462e6..e0593d15d2c 100644 --- a/homeassistant/components/mastodon/const.py +++ b/homeassistant/components/mastodon/const.py @@ -16,3 +16,6 @@ INSTANCE_VERSION: Final = "version" INSTANCE_URI: Final = "uri" INSTANCE_DOMAIN: Final = "domain" ACCOUNT_USERNAME: Final = "username" +ACCOUNT_FOLLOWERS_COUNT: Final = "followers_count" +ACCOUNT_FOLLOWING_COUNT: Final = "following_count" +ACCOUNT_STATUSES_COUNT: Final = "statuses_count" diff --git a/homeassistant/components/mastodon/coordinator.py b/homeassistant/components/mastodon/coordinator.py new file mode 100644 index 00000000000..f1332a0ea43 --- /dev/null +++ b/homeassistant/components/mastodon/coordinator.py @@ -0,0 +1,35 @@ +"""Define an object to manage fetching Mastodon data.""" + +from __future__ import annotations + +from datetime import timedelta +from typing import Any + +from mastodon import Mastodon +from mastodon.Mastodon import MastodonError + +from homeassistant.core import HomeAssistant +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import LOGGER + + +class MastodonCoordinator(DataUpdateCoordinator[dict[str, Any]]): + """Class to manage fetching Mastodon data.""" + + def __init__(self, hass: HomeAssistant, client: Mastodon) -> None: + """Initialize coordinator.""" + super().__init__( + hass, logger=LOGGER, name="Mastodon", update_interval=timedelta(hours=1) + ) + self.client = client + + async def _async_update_data(self) -> dict[str, Any]: + try: + account: dict = await self.hass.async_add_executor_job( + self.client.account_verify_credentials + ) + except MastodonError as ex: + raise UpdateFailed(ex) from ex + + return account diff --git a/homeassistant/components/mastodon/diagnostics.py b/homeassistant/components/mastodon/diagnostics.py new file mode 100644 index 
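The new coordinator.py above is a standard DataUpdateCoordinator: the blocking client call runs in the executor and any client error is re-raised as UpdateFailed, which flags the entities unavailable until the next successful refresh. A trimmed sketch of that shape, with the fetch callable left as an assumption:

from __future__ import annotations

from datetime import timedelta
import logging
from typing import Any

from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed

_LOGGER = logging.getLogger(__name__)


class ExampleCoordinator(DataUpdateCoordinator[dict[str, Any]]):
    """Poll an account once per hour and share the result with all entities."""

    def __init__(self, hass: HomeAssistant, fetch) -> None:
        """Initialize with a blocking callable that returns a dict."""
        super().__init__(
            hass, logger=_LOGGER, name="example", update_interval=timedelta(hours=1)
        )
        self._fetch = fetch

    async def _async_update_data(self) -> dict[str, Any]:
        """Run the blocking fetch in the executor and normalize failures."""
        try:
            return await self.hass.async_add_executor_job(self._fetch)
        except Exception as err:  # the real code catches the client's error class
            raise UpdateFailed(err) from err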
00000000000..7246ae9cf63 --- /dev/null +++ b/homeassistant/components/mastodon/diagnostics.py @@ -0,0 +1,35 @@ +"""Diagnostics support for the Mastodon integration.""" + +from __future__ import annotations + +from typing import Any + +from homeassistant.core import HomeAssistant + +from . import MastodonConfigEntry + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, config_entry: MastodonConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + + instance, account = await hass.async_add_executor_job( + get_diagnostics, + config_entry, + ) + + return { + "instance": instance, + "account": account, + } + + +def get_diagnostics(config_entry: MastodonConfigEntry) -> tuple[dict, dict]: + """Get mastodon diagnostics.""" + client = config_entry.runtime_data.client + + instance = client.instance() + account = client.account_verify_credentials() + + return instance, account diff --git a/homeassistant/components/mastodon/entity.py b/homeassistant/components/mastodon/entity.py new file mode 100644 index 00000000000..93d630627d7 --- /dev/null +++ b/homeassistant/components/mastodon/entity.py @@ -0,0 +1,48 @@ +"""Base class for Mastodon entities.""" + +from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo +from homeassistant.helpers.entity import EntityDescription +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from . import MastodonConfigEntry +from .const import DEFAULT_NAME, DOMAIN, INSTANCE_VERSION +from .coordinator import MastodonCoordinator +from .utils import construct_mastodon_username + + +class MastodonEntity(CoordinatorEntity[MastodonCoordinator]): + """Defines a base Mastodon entity.""" + + _attr_has_entity_name = True + + def __init__( + self, + coordinator: MastodonCoordinator, + entity_description: EntityDescription, + data: MastodonConfigEntry, + ) -> None: + """Initialize Mastodon entity.""" + super().__init__(coordinator) + unique_id = data.unique_id + assert unique_id is not None + self._attr_unique_id = f"{unique_id}_{entity_description.key}" + + # Legacy yaml config default title is Mastodon, don't make name Mastodon Mastodon + name = "Mastodon" + if data.title != DEFAULT_NAME: + name = f"Mastodon {data.title}" + + full_account_name = construct_mastodon_username( + data.runtime_data.instance, data.runtime_data.account + ) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, unique_id)}, + manufacturer="Mastodon gGmbH", + model=full_account_name, + entry_type=DeviceEntryType.SERVICE, + sw_version=data.runtime_data.instance[INSTANCE_VERSION], + name=name, + ) + + self.entity_description = entity_description diff --git a/homeassistant/components/mastodon/icons.json b/homeassistant/components/mastodon/icons.json new file mode 100644 index 00000000000..082e27a64c2 --- /dev/null +++ b/homeassistant/components/mastodon/icons.json @@ -0,0 +1,15 @@ +{ + "entity": { + "sensor": { + "followers": { + "default": "mdi:account-multiple" + }, + "following": { + "default": "mdi:account-multiple" + }, + "posts": { + "default": "mdi:message-text" + } + } + } +} diff --git a/homeassistant/components/mastodon/sensor.py b/homeassistant/components/mastodon/sensor.py new file mode 100644 index 00000000000..12acfc04743 --- /dev/null +++ b/homeassistant/components/mastodon/sensor.py @@ -0,0 +1,85 @@ +"""Mastodon platform for sensor components.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any + +from 
homeassistant.components.sensor import ( + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import StateType + +from . import MastodonConfigEntry +from .const import ( + ACCOUNT_FOLLOWERS_COUNT, + ACCOUNT_FOLLOWING_COUNT, + ACCOUNT_STATUSES_COUNT, +) +from .entity import MastodonEntity + + +@dataclass(frozen=True, kw_only=True) +class MastodonSensorEntityDescription(SensorEntityDescription): + """Describes Mastodon sensor entity.""" + + value_fn: Callable[[dict[str, Any]], StateType] + + +ENTITY_DESCRIPTIONS = ( + MastodonSensorEntityDescription( + key="followers", + translation_key="followers", + native_unit_of_measurement="accounts", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.get(ACCOUNT_FOLLOWERS_COUNT), + ), + MastodonSensorEntityDescription( + key="following", + translation_key="following", + native_unit_of_measurement="accounts", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.get(ACCOUNT_FOLLOWING_COUNT), + ), + MastodonSensorEntityDescription( + key="posts", + translation_key="posts", + native_unit_of_measurement="posts", + state_class=SensorStateClass.TOTAL, + value_fn=lambda data: data.get(ACCOUNT_STATUSES_COUNT), + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + entry: MastodonConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the sensor platform for entity.""" + coordinator = entry.runtime_data.coordinator + + async_add_entities( + MastodonSensorEntity( + coordinator=coordinator, + entity_description=entity_description, + data=entry, + ) + for entity_description in ENTITY_DESCRIPTIONS + ) + + +class MastodonSensorEntity(MastodonEntity, SensorEntity): + """A Mastodon sensor entity.""" + + entity_description: MastodonSensorEntityDescription + + @property + def native_value(self) -> StateType: + """Return the native value of the sensor.""" + return self.entity_description.value_fn(self.coordinator.data) diff --git a/homeassistant/components/mastodon/strings.json b/homeassistant/components/mastodon/strings.json index e1124aad1a9..906b67dd481 100644 --- a/homeassistant/components/mastodon/strings.json +++ b/homeassistant/components/mastodon/strings.json @@ -4,8 +4,8 @@ "user": { "data": { "base_url": "[%key:common::config_flow::data::url%]", - "client_id": "Client Key", - "client_secret": "Client Secret", + "client_id": "Client key", + "client_secret": "Client secret", "access_token": "[%key:common::config_flow::data::access_token%]" }, "data_description": { @@ -35,5 +35,18 @@ "title": "YAML import failed with unknown error", "description": "Configuring {integration_title} using YAML is being removed but there was an unknown error while importing your existing configuration.\nPlease use the UI to configure Mastodon. Don't forget to delete the YAML configuration." 
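The sensor platform above is driven entirely by frozen entity descriptions that carry a value_fn, so each entity's native_value is a single lookup over coordinator data. A minimal sketch of that description-driven pattern (the key and data field come from the hunk, the class names are placeholders):

from collections.abc import Callable
from dataclasses import dataclass
from typing import Any

from homeassistant.components.sensor import SensorEntityDescription
from homeassistant.helpers.typing import StateType


@dataclass(frozen=True, kw_only=True)
class ExampleSensorEntityDescription(SensorEntityDescription):
    """Sensor description with a callable that extracts the value."""

    value_fn: Callable[[dict[str, Any]], StateType]


DESCRIPTIONS = (
    ExampleSensorEntityDescription(
        key="followers",
        value_fn=lambda data: data.get("followers_count"),
    ),
)

# Inside the entity class:
#     return self.entity_description.value_fn(self.coordinator.data)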
} + }, + "entity": { + "sensor": { + "followers": { + "name": "Followers" + }, + "following": { + "name": "Following" + }, + "posts": { + "name": "Posts" + } + } } } diff --git a/homeassistant/components/matrix/icons.json b/homeassistant/components/matrix/icons.json index 4fc56ebe0ff..a8b83e67303 100644 --- a/homeassistant/components/matrix/icons.json +++ b/homeassistant/components/matrix/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_message": "mdi:matrix" + "send_message": { + "service": "mdi:matrix" + } } } diff --git a/homeassistant/components/matrix/manifest.json b/homeassistant/components/matrix/manifest.json index 7e854a85434..3c465c44f24 100644 --- a/homeassistant/components/matrix/manifest.json +++ b/homeassistant/components/matrix/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/matrix", "iot_class": "cloud_push", "loggers": ["matrix_client"], - "requirements": ["matrix-nio==0.24.0", "Pillow==10.4.0"] + "requirements": ["matrix-nio==0.25.0", "Pillow==10.4.0"] } diff --git a/homeassistant/components/matter/sensor.py b/homeassistant/components/matter/sensor.py index c3ab18072f0..5d4ad900d8e 100644 --- a/homeassistant/components/matter/sensor.py +++ b/homeassistant/components/matter/sensor.py @@ -384,7 +384,7 @@ DISCOVERY_SCHEMAS = [ key="ThirdRealityEnergySensorWattAccumulated", device_class=SensorDeviceClass.ENERGY, entity_category=EntityCategory.DIAGNOSTIC, - native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, suggested_display_precision=3, state_class=SensorStateClass.TOTAL_INCREASING, measurement_to_ha=lambda x: x / 1000, diff --git a/homeassistant/components/mealie/__init__.py b/homeassistant/components/mealie/__init__.py index 5c9c91729c0..bf0fbcac406 100644 --- a/homeassistant/components/mealie/__init__.py +++ b/homeassistant/components/mealie/__init__.py @@ -48,6 +48,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: MealieConfigEntry) -> bo ), ) try: + await client.define_household_support() about = await client.get_about() version = create_version(about.version) except MealieAuthenticationError as error: diff --git a/homeassistant/components/mealie/icons.json b/homeassistant/components/mealie/icons.json index 16176391701..d7e29cc8bbe 100644 --- a/homeassistant/components/mealie/icons.json +++ b/homeassistant/components/mealie/icons.json @@ -24,10 +24,20 @@ } }, "services": { - "get_mealplan": "mdi:food", - "get_recipe": "mdi:map", - "import_recipe": "mdi:map-search", - "set_random_mealplan": "mdi:dice-multiple", - "set_mealplan": "mdi:food" + "get_mealplan": { + "service": "mdi:food" + }, + "get_recipe": { + "service": "mdi:map" + }, + "import_recipe": { + "service": "mdi:map-search" + }, + "set_random_mealplan": { + "service": "mdi:dice-multiple" + }, + "set_mealplan": { + "service": "mdi:food" + } } } diff --git a/homeassistant/components/mealie/manifest.json b/homeassistant/components/mealie/manifest.json index 75093577b0f..4fabdffadc4 100644 --- a/homeassistant/components/mealie/manifest.json +++ b/homeassistant/components/mealie/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/mealie", "integration_type": "service", "iot_class": "local_polling", - "requirements": ["aiomealie==0.8.1"] + "requirements": ["aiomealie==0.9.2"] } diff --git a/homeassistant/components/media_extractor/config_flow.py b/homeassistant/components/media_extractor/config_flow.py index 4343d0551e0..b91942d7b13 100644 --- 
a/homeassistant/components/media_extractor/config_flow.py +++ b/homeassistant/components/media_extractor/config_flow.py @@ -25,8 +25,6 @@ class MediaExtractorConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=vol.Schema({})) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: None) -> ConfigFlowResult: """Handle import.""" return self.async_create_entry(title="Media extractor", data={}) diff --git a/homeassistant/components/media_extractor/icons.json b/homeassistant/components/media_extractor/icons.json index 7abc4410b19..611db7c944c 100644 --- a/homeassistant/components/media_extractor/icons.json +++ b/homeassistant/components/media_extractor/icons.json @@ -1,6 +1,10 @@ { "services": { - "play_media": "mdi:play", - "extract_media_url": "mdi:link" + "play_media": { + "service": "mdi:play" + }, + "extract_media_url": { + "service": "mdi:link" + } } } diff --git a/homeassistant/components/media_player/__init__.py b/homeassistant/components/media_player/__init__.py index d499ee8d6d3..beb672a1e58 100644 --- a/homeassistant/components/media_player/__init__.py +++ b/homeassistant/components/media_player/__init__.py @@ -274,59 +274,59 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) component.async_register_entity_service( - SERVICE_TURN_ON, {}, "async_turn_on", [MediaPlayerEntityFeature.TURN_ON] + SERVICE_TURN_ON, None, "async_turn_on", [MediaPlayerEntityFeature.TURN_ON] ) component.async_register_entity_service( - SERVICE_TURN_OFF, {}, "async_turn_off", [MediaPlayerEntityFeature.TURN_OFF] + SERVICE_TURN_OFF, None, "async_turn_off", [MediaPlayerEntityFeature.TURN_OFF] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [MediaPlayerEntityFeature.TURN_OFF | MediaPlayerEntityFeature.TURN_ON], ) component.async_register_entity_service( SERVICE_VOLUME_UP, - {}, + None, "async_volume_up", [MediaPlayerEntityFeature.VOLUME_SET, MediaPlayerEntityFeature.VOLUME_STEP], ) component.async_register_entity_service( SERVICE_VOLUME_DOWN, - {}, + None, "async_volume_down", [MediaPlayerEntityFeature.VOLUME_SET, MediaPlayerEntityFeature.VOLUME_STEP], ) component.async_register_entity_service( SERVICE_MEDIA_PLAY_PAUSE, - {}, + None, "async_media_play_pause", [MediaPlayerEntityFeature.PLAY | MediaPlayerEntityFeature.PAUSE], ) component.async_register_entity_service( - SERVICE_MEDIA_PLAY, {}, "async_media_play", [MediaPlayerEntityFeature.PLAY] + SERVICE_MEDIA_PLAY, None, "async_media_play", [MediaPlayerEntityFeature.PLAY] ) component.async_register_entity_service( - SERVICE_MEDIA_PAUSE, {}, "async_media_pause", [MediaPlayerEntityFeature.PAUSE] + SERVICE_MEDIA_PAUSE, None, "async_media_pause", [MediaPlayerEntityFeature.PAUSE] ) component.async_register_entity_service( - SERVICE_MEDIA_STOP, {}, "async_media_stop", [MediaPlayerEntityFeature.STOP] + SERVICE_MEDIA_STOP, None, "async_media_stop", [MediaPlayerEntityFeature.STOP] ) component.async_register_entity_service( SERVICE_MEDIA_NEXT_TRACK, - {}, + None, "async_media_next_track", [MediaPlayerEntityFeature.NEXT_TRACK], ) component.async_register_entity_service( SERVICE_MEDIA_PREVIOUS_TRACK, - {}, + None, "async_media_previous_track", [MediaPlayerEntityFeature.PREVIOUS_TRACK], ) component.async_register_entity_service( SERVICE_CLEAR_PLAYLIST, - {}, + None, "async_clear_playlist", [MediaPlayerEntityFeature.CLEAR_PLAYLIST], ) @@ -423,7 +423,7 
@@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: [MediaPlayerEntityFeature.SHUFFLE_SET], ) component.async_register_entity_service( - SERVICE_UNJOIN, {}, "async_unjoin_player", [MediaPlayerEntityFeature.GROUPING] + SERVICE_UNJOIN, None, "async_unjoin_player", [MediaPlayerEntityFeature.GROUPING] ) component.async_register_entity_service( diff --git a/homeassistant/components/media_player/icons.json b/homeassistant/components/media_player/icons.json index 847ce5989d6..c11211c38ec 100644 --- a/homeassistant/components/media_player/icons.json +++ b/homeassistant/components/media_player/icons.json @@ -32,27 +32,71 @@ } }, "services": { - "clear_playlist": "mdi:playlist-remove", - "join": "mdi:group", - "media_next_track": "mdi:skip-next", - "media_pause": "mdi:pause", - "media_play": "mdi:play", - "media_play_pause": "mdi:play-pause", - "media_previous_track": "mdi:skip-previous", - "media_seek": "mdi:fast-forward", - "media_stop": "mdi:stop", - "play_media": "mdi:play", - "repeat_set": "mdi:repeat", - "select_sound_mode": "mdi:surround-sound", - "select_source": "mdi:import", - "shuffle_set": "mdi:shuffle", - "toggle": "mdi:play-pause", - "turn_off": "mdi:power", - "turn_on": "mdi:power", - "unjoin": "mdi:ungroup", - "volume_down": "mdi:volume-minus", - "volume_mute": "mdi:volume-mute", - "volume_set": "mdi:volume-medium", - "volume_up": "mdi:volume-plus" + "clear_playlist": { + "service": "mdi:playlist-remove" + }, + "join": { + "service": "mdi:group" + }, + "media_next_track": { + "service": "mdi:skip-next" + }, + "media_pause": { + "service": "mdi:pause" + }, + "media_play": { + "service": "mdi:play" + }, + "media_play_pause": { + "service": "mdi:play-pause" + }, + "media_previous_track": { + "service": "mdi:skip-previous" + }, + "media_seek": { + "service": "mdi:fast-forward" + }, + "media_stop": { + "service": "mdi:stop" + }, + "play_media": { + "service": "mdi:play" + }, + "repeat_set": { + "service": "mdi:repeat" + }, + "select_sound_mode": { + "service": "mdi:surround-sound" + }, + "select_source": { + "service": "mdi:import" + }, + "shuffle_set": { + "service": "mdi:shuffle" + }, + "toggle": { + "service": "mdi:play-pause" + }, + "turn_off": { + "service": "mdi:power" + }, + "turn_on": { + "service": "mdi:power" + }, + "unjoin": { + "service": "mdi:ungroup" + }, + "volume_down": { + "service": "mdi:volume-minus" + }, + "volume_mute": { + "service": "mdi:volume-mute" + }, + "volume_set": { + "service": "mdi:volume-medium" + }, + "volume_up": { + "service": "mdi:volume-plus" + } } } diff --git a/homeassistant/components/melcloud/icons.json b/homeassistant/components/melcloud/icons.json index de3eb3c0ba2..b91696b5b35 100644 --- a/homeassistant/components/melcloud/icons.json +++ b/homeassistant/components/melcloud/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "set_vane_horizontal": "mdi:arrow-left-right", - "set_vane_vertical": "mdi:arrow-up-down" + "set_vane_horizontal": { + "service": "mdi:arrow-left-right" + }, + "set_vane_vertical": { + "service": "mdi:arrow-up-down" + } } } diff --git a/homeassistant/components/meteoalarm/manifest.json b/homeassistant/components/meteoalarm/manifest.json index 9a41e8a3062..4de91f6a431 100644 --- a/homeassistant/components/meteoalarm/manifest.json +++ b/homeassistant/components/meteoalarm/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/meteoalarm", "iot_class": "cloud_polling", "loggers": ["meteoalertapi"], - "requirements": ["meteoalertapi==0.3.0"] + "requirements": 
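A recurring change in this section (media_player above, and later modern_forms and motioneye) replaces the empty dict schema in async_register_entity_service with None, which declares that the service takes no fields instead of validating against an empty schema. A rough platform-level sketch of the call, with the service and method names hypothetical:

from homeassistant.helpers import entity_platform


async def async_setup_entry(hass, entry, async_add_entities) -> None:
    """Register an argument-less entity service for this platform."""
    platform = entity_platform.async_get_current_platform()
    # None (rather than {}) means the service accepts no extra fields
    platform.async_register_entity_service(
        "clear_sleep_timer", None, "async_clear_sleep_timer"
    )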
["meteoalertapi==0.3.1"] } diff --git a/homeassistant/components/meteoclimatic/config_flow.py b/homeassistant/components/meteoclimatic/config_flow.py index d772a6c9d62..59877941fee 100644 --- a/homeassistant/components/meteoclimatic/config_flow.py +++ b/homeassistant/components/meteoclimatic/config_flow.py @@ -1,12 +1,13 @@ """Config flow to configure the Meteoclimatic integration.""" import logging +from typing import Any from meteoclimatic import MeteoclimaticClient from meteoclimatic.exceptions import MeteoclimaticError, StationNotFound import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import CONF_STATION_CODE, DOMAIN @@ -35,9 +36,11 @@ class MeteoclimaticFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors or {}, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is None: return self._show_setup_form(user_input, errors) diff --git a/homeassistant/components/microsoft_face/icons.json b/homeassistant/components/microsoft_face/icons.json index 826e390197a..6e61676224d 100644 --- a/homeassistant/components/microsoft_face/icons.json +++ b/homeassistant/components/microsoft_face/icons.json @@ -1,10 +1,22 @@ { "services": { - "create_group": "mdi:account-multiple-plus", - "create_person": "mdi:account-plus", - "delete_group": "mdi:account-multiple-remove", - "delete_person": "mdi:account-remove", - "face_person": "mdi:face-man", - "train_group": "mdi:account-multiple-check" + "create_group": { + "service": "mdi:account-multiple-plus" + }, + "create_person": { + "service": "mdi:account-plus" + }, + "delete_group": { + "service": "mdi:account-multiple-remove" + }, + "delete_person": { + "service": "mdi:account-remove" + }, + "face_person": { + "service": "mdi:face-man" + }, + "train_group": { + "service": "mdi:account-multiple-check" + } } } diff --git a/homeassistant/components/mikrotik/__init__.py b/homeassistant/components/mikrotik/__init__.py index 9f2b40bf1c8..cecf96a6c3e 100644 --- a/homeassistant/components/mikrotik/__init__.py +++ b/homeassistant/components/mikrotik/__init__.py @@ -4,14 +4,12 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import device_registry as dr from .const import ATTR_MANUFACTURER, DOMAIN from .coordinator import MikrotikDataUpdateCoordinator, get_api from .errors import CannotConnect, LoginError -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [Platform.DEVICE_TRACKER] type MikrotikConfigEntry = ConfigEntry[MikrotikDataUpdateCoordinator] diff --git a/homeassistant/components/mikrotik/config_flow.py b/homeassistant/components/mikrotik/config_flow.py index fe0d020d373..6035565acf1 100644 --- a/homeassistant/components/mikrotik/config_flow.py +++ b/homeassistant/components/mikrotik/config_flow.py @@ -83,7 +83,9 @@ class MikrotikFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_reauth(self, data: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> 
ConfigFlowResult: """Perform reauth upon an API authentication error.""" self._reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] diff --git a/homeassistant/components/mill/config_flow.py b/homeassistant/components/mill/config_flow.py index 58660d6358e..db1b2711575 100644 --- a/homeassistant/components/mill/config_flow.py +++ b/homeassistant/components/mill/config_flow.py @@ -1,10 +1,12 @@ """Adds config flow for Mill integration.""" +from typing import Any + from mill import Mill from mill_local import Mill as MillLocal import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_USERNAME from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -16,7 +18,9 @@ class MillConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" data_schema = vol.Schema( { diff --git a/homeassistant/components/mill/icons.json b/homeassistant/components/mill/icons.json index 13d6bb650c1..f2595f28057 100644 --- a/homeassistant/components/mill/icons.json +++ b/homeassistant/components/mill/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_room_temperature": "mdi:thermometer" + "set_room_temperature": { + "service": "mdi:thermometer" + } } } diff --git a/homeassistant/components/min_max/icons.json b/homeassistant/components/min_max/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/min_max/icons.json +++ b/homeassistant/components/min_max/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/minio/__init__.py b/homeassistant/components/minio/__init__.py index e2cbcdf9ed1..e5470cc3313 100644 --- a/homeassistant/components/minio/__init__.py +++ b/homeassistant/components/minio/__init__.py @@ -127,7 +127,6 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: def _render_service_value(service, key): value = service.data[key] - value.hass = hass return value.async_render(parse_result=False) def put_file(service: ServiceCall) -> None: diff --git a/homeassistant/components/minio/icons.json b/homeassistant/components/minio/icons.json index 16deb1a168d..dce148a23de 100644 --- a/homeassistant/components/minio/icons.json +++ b/homeassistant/components/minio/icons.json @@ -1,7 +1,13 @@ { "services": { - "get": "mdi:cloud-download", - "put": "mdi:cloud-upload", - "remove": "mdi:delete" + "get": { + "service": "mdi:cloud-download" + }, + "put": { + "service": "mdi:cloud-upload" + }, + "remove": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/mobile_app/config_flow.py b/homeassistant/components/mobile_app/config_flow.py index 66035733c33..bd72b2d7f42 100644 --- a/homeassistant/components/mobile_app/config_flow.py +++ b/homeassistant/components/mobile_app/config_flow.py @@ -1,9 +1,10 @@ """Config flow for Mobile App.""" +from typing import Any import uuid from homeassistant.components import person -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ATTR_DEVICE_ID from homeassistant.helpers import entity_registry as er @@ -15,7 +16,9 @@ class MobileAppFlowHandler(ConfigFlow, 
domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" placeholders = { "apps_url": "https://www.home-assistant.io/integrations/mobile_app/#apps" diff --git a/homeassistant/components/mobile_app/device_action.py b/homeassistant/components/mobile_app/device_action.py index bebdef0e917..dccff926b34 100644 --- a/homeassistant/components/mobile_app/device_action.py +++ b/homeassistant/components/mobile_app/device_action.py @@ -64,7 +64,6 @@ async def async_call_action_from_config( continue value_template = config[key] - template.attach(hass, value_template) try: service_data[key] = template.render_complex(value_template, variables) diff --git a/homeassistant/components/modbus/icons.json b/homeassistant/components/modbus/icons.json index eeaeff6403b..05ee76fd44e 100644 --- a/homeassistant/components/modbus/icons.json +++ b/homeassistant/components/modbus/icons.json @@ -1,9 +1,19 @@ { "services": { - "reload": "mdi:reload", - "write_coil": "mdi:pencil", - "write_register": "mdi:database-edit", - "stop": "mdi:stop", - "restart": "mdi:restart" + "reload": { + "service": "mdi:reload" + }, + "write_coil": { + "service": "mdi:pencil" + }, + "write_register": { + "service": "mdi:database-edit" + }, + "stop": { + "service": "mdi:stop" + }, + "restart": { + "service": "mdi:restart" + } } } diff --git a/homeassistant/components/modbus/modbus.py b/homeassistant/components/modbus/modbus.py index 82caa772ac4..e70b9de50f0 100644 --- a/homeassistant/components/modbus/modbus.py +++ b/homeassistant/components/modbus/modbus.py @@ -76,8 +76,8 @@ from .validators import check_config _LOGGER = logging.getLogger(__name__) -ConfEntry = namedtuple("ConfEntry", "call_type attr func_name") -RunEntry = namedtuple("RunEntry", "attr func") +ConfEntry = namedtuple("ConfEntry", "call_type attr func_name") # noqa: PYI024 +RunEntry = namedtuple("RunEntry", "attr func") # noqa: PYI024 PB_CALL = [ ConfEntry( CALL_TYPE_COIL, diff --git a/homeassistant/components/modbus/validators.py b/homeassistant/components/modbus/validators.py index 90ef0b5f083..e1120094d01 100644 --- a/homeassistant/components/modbus/validators.py +++ b/homeassistant/components/modbus/validators.py @@ -46,7 +46,7 @@ from .const import ( _LOGGER = logging.getLogger(__name__) -ENTRY = namedtuple( +ENTRY = namedtuple( # noqa: PYI024 "ENTRY", [ "struct_id", @@ -60,7 +60,7 @@ ILLEGAL = "I" OPTIONAL = "O" DEMANDED = "D" -PARM_IS_LEGAL = namedtuple( +PARM_IS_LEGAL = namedtuple( # noqa: PYI024 "PARM_IS_LEGAL", [ "count", diff --git a/homeassistant/components/modern_forms/diagnostics.py b/homeassistant/components/modern_forms/diagnostics.py new file mode 100644 index 00000000000..0011a7c3bab --- /dev/null +++ b/homeassistant/components/modern_forms/diagnostics.py @@ -0,0 +1,36 @@ +"""Diagnostics support for Modern Forms.""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import TYPE_CHECKING, Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_MAC +from homeassistant.core import HomeAssistant + +from .const import DOMAIN +from .coordinator import ModernFormsDataUpdateCoordinator + +REDACT_CONFIG = {CONF_MAC} +REDACT_DEVICE_INFO = {"mac_address", "owner"} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: ConfigEntry +) 
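The minio and mobile_app hunks above delete the old value.hass = hass and template.attach(...) wiring: a Template created with hass as its second constructor argument already knows where to render. A small sketch under that assumption (rendering must happen inside the event loop):

from homeassistant.core import HomeAssistant
from homeassistant.helpers.template import Template


def render_greeting(hass: HomeAssistant, name: str) -> str:
    """Render a template that received hass at construction time."""
    tpl = Template("Hello {{ name }}!", hass)  # no separate attach step needed
    return tpl.async_render({"name": name}, parse_result=False)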
-> dict[str, Any]: + """Return diagnostics for a config entry.""" + coordinator: ModernFormsDataUpdateCoordinator = hass.data[DOMAIN][entry.entry_id] + if TYPE_CHECKING: + assert coordinator is not None + + return { + "config_entry": async_redact_data(entry.as_dict(), REDACT_CONFIG), + "device": { + "info": async_redact_data( + asdict(coordinator.modern_forms.info), REDACT_DEVICE_INFO + ), + "status": asdict(coordinator.modern_forms.status), + }, + } diff --git a/homeassistant/components/modern_forms/fan.py b/homeassistant/components/modern_forms/fan.py index c00549c327a..e34038c7be7 100644 --- a/homeassistant/components/modern_forms/fan.py +++ b/homeassistant/components/modern_forms/fan.py @@ -56,7 +56,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_CLEAR_FAN_SLEEP_TIMER, - {}, + None, "async_clear_fan_sleep_timer", ) diff --git a/homeassistant/components/modern_forms/icons.json b/homeassistant/components/modern_forms/icons.json index e5df55dc15e..544e48e17f1 100644 --- a/homeassistant/components/modern_forms/icons.json +++ b/homeassistant/components/modern_forms/icons.json @@ -26,9 +26,17 @@ } }, "services": { - "set_light_sleep_timer": "mdi:timer", - "clear_light_sleep_timer": "mdi:timer-cancel", - "set_fan_sleep_timer": "mdi:timer", - "clear_fan_sleep_timer": "mdi:timer-cancel" + "set_light_sleep_timer": { + "service": "mdi:timer" + }, + "clear_light_sleep_timer": { + "service": "mdi:timer-cancel" + }, + "set_fan_sleep_timer": { + "service": "mdi:timer" + }, + "clear_fan_sleep_timer": { + "service": "mdi:timer-cancel" + } } } diff --git a/homeassistant/components/modern_forms/light.py b/homeassistant/components/modern_forms/light.py index e758a50e77e..4c210038694 100644 --- a/homeassistant/components/modern_forms/light.py +++ b/homeassistant/components/modern_forms/light.py @@ -61,7 +61,7 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_CLEAR_LIGHT_SLEEP_TIMER, - {}, + None, "async_clear_light_sleep_timer", ) diff --git a/homeassistant/components/monoprice/config_flow.py b/homeassistant/components/monoprice/config_flow.py index 2c7163123b6..5f0b1bf27b5 100644 --- a/homeassistant/components/monoprice/config_flow.py +++ b/homeassistant/components/monoprice/config_flow.py @@ -3,12 +3,18 @@ from __future__ import annotations import logging +from typing import Any from pymonoprice import get_monoprice from serial import SerialException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_PORT from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -76,7 +82,9 @@ class MonoPriceConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/monoprice/icons.json b/homeassistant/components/monoprice/icons.json index 22610cc2a47..d560c7bcfa8 100644 --- a/homeassistant/components/monoprice/icons.json +++ b/homeassistant/components/monoprice/icons.json @@ -1,6 +1,10 @@ { "services": { - "snapshot": "mdi:content-copy", - "restore": "mdi:content-paste" + "snapshot": { + "service": "mdi:content-copy" + }, + "restore": { + "service": 
"mdi:content-paste" + } } } diff --git a/homeassistant/components/motion_blinds/button.py b/homeassistant/components/motion_blinds/button.py new file mode 100644 index 00000000000..30f1cd53e6f --- /dev/null +++ b/homeassistant/components/motion_blinds/button.py @@ -0,0 +1,71 @@ +"""Support for Motionblinds button entity using their WLAN API.""" + +from __future__ import annotations + +from motionblinds.motion_blinds import LimitStatus, MotionBlind + +from homeassistant.components.button import ButtonEntity +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DOMAIN, KEY_COORDINATOR, KEY_GATEWAY +from .coordinator import DataUpdateCoordinatorMotionBlinds +from .entity import MotionCoordinatorEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Perform the setup for Motionblinds.""" + entities: list[ButtonEntity] = [] + motion_gateway = hass.data[DOMAIN][config_entry.entry_id][KEY_GATEWAY] + coordinator = hass.data[DOMAIN][config_entry.entry_id][KEY_COORDINATOR] + + for blind in motion_gateway.device_list.values(): + if blind.limit_status == LimitStatus.Limit3Detected.name: + entities.append(MotionGoFavoriteButton(coordinator, blind)) + entities.append(MotionSetFavoriteButton(coordinator, blind)) + + async_add_entities(entities) + + +class MotionGoFavoriteButton(MotionCoordinatorEntity, ButtonEntity): + """Button entity to go to the favorite position of a blind.""" + + _attr_translation_key = "go_favorite" + + def __init__( + self, coordinator: DataUpdateCoordinatorMotionBlinds, blind: MotionBlind + ) -> None: + """Initialize the Motion Button.""" + super().__init__(coordinator, blind) + self._attr_unique_id = f"{blind.mac}-go-favorite" + + async def async_press(self) -> None: + """Execute the button action.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Go_favorite_position) + await self.async_request_position_till_stop() + + +class MotionSetFavoriteButton(MotionCoordinatorEntity, ButtonEntity): + """Button entity to set the favorite position of a blind to the current position.""" + + _attr_entity_category = EntityCategory.CONFIG + _attr_translation_key = "set_favorite" + + def __init__( + self, coordinator: DataUpdateCoordinatorMotionBlinds, blind: MotionBlind + ) -> None: + """Initialize the Motion Button.""" + super().__init__(coordinator, blind) + self._attr_unique_id = f"{blind.mac}-set-favorite" + + async def async_press(self) -> None: + """Execute the button action.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_favorite_position) diff --git a/homeassistant/components/motion_blinds/const.py b/homeassistant/components/motion_blinds/const.py index e089fd17943..96067d7ceb0 100644 --- a/homeassistant/components/motion_blinds/const.py +++ b/homeassistant/components/motion_blinds/const.py @@ -6,7 +6,7 @@ DOMAIN = "motion_blinds" MANUFACTURER = "Motionblinds, Coulisse B.V." 
DEFAULT_GATEWAY_NAME = "Motionblinds Gateway" -PLATFORMS = [Platform.COVER, Platform.SENSOR] +PLATFORMS = [Platform.BUTTON, Platform.COVER, Platform.SENSOR] CONF_WAIT_FOR_PUSH = "wait_for_push" CONF_INTERFACE = "interface" diff --git a/homeassistant/components/motion_blinds/cover.py b/homeassistant/components/motion_blinds/cover.py index 2cbee96adb7..e60e7fa0ae8 100644 --- a/homeassistant/components/motion_blinds/cover.py +++ b/homeassistant/components/motion_blinds/cover.py @@ -5,7 +5,7 @@ from __future__ import annotations import logging from typing import Any -from motionblinds import DEVICE_TYPES_WIFI, BlindType +from motionblinds import BlindType import voluptuous as vol from homeassistant.components.cover import ( @@ -16,10 +16,9 @@ from homeassistant.components.cover import ( CoverEntityFeature, ) from homeassistant.config_entries import ConfigEntry -from homeassistant.core import CALLBACK_TYPE, HomeAssistant +from homeassistant.core import HomeAssistant from homeassistant.helpers import config_validation as cv, entity_platform from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.event import async_call_later from homeassistant.helpers.typing import VolDictType from .const import ( @@ -31,8 +30,6 @@ from .const import ( KEY_GATEWAY, SERVICE_SET_ABSOLUTE_POSITION, UPDATE_DELAY_STOP, - UPDATE_INTERVAL_MOVING, - UPDATE_INTERVAL_MOVING_WIFI, ) from .entity import MotionCoordinatorEntity @@ -179,14 +176,6 @@ class MotionBaseDevice(MotionCoordinatorEntity, CoverEntity): """Initialize the blind.""" super().__init__(coordinator, blind) - self._requesting_position: CALLBACK_TYPE | None = None - self._previous_positions = [] - - if blind.device_type in DEVICE_TYPES_WIFI: - self._update_interval_moving = UPDATE_INTERVAL_MOVING_WIFI - else: - self._update_interval_moving = UPDATE_INTERVAL_MOVING - self._attr_device_class = device_class self._attr_unique_id = blind.mac @@ -218,47 +207,6 @@ class MotionBaseDevice(MotionCoordinatorEntity, CoverEntity): return None return self._blind.position == 100 - async def async_scheduled_update_request(self, *_): - """Request a state update from the blind at a scheduled point in time.""" - # add the last position to the list and keep the list at max 2 items - self._previous_positions.append(self.current_cover_position) - if len(self._previous_positions) > 2: - del self._previous_positions[: len(self._previous_positions) - 2] - - async with self._api_lock: - await self.hass.async_add_executor_job(self._blind.Update_trigger) - - self.async_write_ha_state() - - if len(self._previous_positions) < 2 or not all( - self.current_cover_position == prev_position - for prev_position in self._previous_positions - ): - # keep updating the position @self._update_interval_moving until the position does not change. 
- self._requesting_position = async_call_later( - self.hass, - self._update_interval_moving, - self.async_scheduled_update_request, - ) - else: - self._previous_positions = [] - self._requesting_position = None - - async def async_request_position_till_stop(self, delay=None): - """Request the position of the blind every self._update_interval_moving seconds until it stops moving.""" - if delay is None: - delay = self._update_interval_moving - - self._previous_positions = [] - if self.current_cover_position is None: - return - if self._requesting_position is not None: - self._requesting_position() - - self._requesting_position = async_call_later( - self.hass, delay, self.async_scheduled_update_request - ) - async def async_open_cover(self, **kwargs: Any) -> None: """Open the cover.""" async with self._api_lock: @@ -382,23 +330,63 @@ class MotionTiltOnlyDevice(MotionTiltDevice): """Return current position of cover.""" return None + @property + def current_cover_tilt_position(self) -> int | None: + """Return current angle of cover. + + None is unknown, 0 is closed/minimum tilt, 100 is fully open/maximum tilt. + """ + if self._blind.position is None: + if self._blind.angle is None: + return None + return self._blind.angle * 100 / 180 + + return self._blind.position + @property def is_closed(self) -> bool | None: """Return if the cover is closed or not.""" - if self._blind.angle is None: - return None - return self._blind.angle == 0 + if self._blind.position is None: + if self._blind.angle is None: + return None + return self._blind.angle == 0 + + return self._blind.position == 0 + + async def async_open_cover_tilt(self, **kwargs: Any) -> None: + """Open the cover tilt.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Open) + + async def async_close_cover_tilt(self, **kwargs: Any) -> None: + """Close the cover tilt.""" + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Close) + + async def async_set_cover_tilt_position(self, **kwargs: Any) -> None: + """Move the cover tilt to a specific position.""" + angle = kwargs[ATTR_TILT_POSITION] + if self._blind.position is None: + angle = angle * 180 / 100 + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_angle, angle) + else: + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_position, angle) async def async_set_absolute_position(self, **kwargs): """Move the cover to a specific absolute position (see TDBU).""" angle = kwargs.get(ATTR_TILT_POSITION) - if angle is not None: + if angle is None: + return + + if self._blind.position is None: angle = angle * 180 / 100 async with self._api_lock: - await self.hass.async_add_executor_job( - self._blind.Set_angle, - angle, - ) + await self.hass.async_add_executor_job(self._blind.Set_angle, angle) + else: + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Set_position, angle) class MotionTDBUDevice(MotionBaseDevice): diff --git a/homeassistant/components/motion_blinds/entity.py b/homeassistant/components/motion_blinds/entity.py index 4734d4d9a65..483a638a0eb 100644 --- a/homeassistant/components/motion_blinds/entity.py +++ b/homeassistant/components/motion_blinds/entity.py @@ -5,8 +5,10 @@ from __future__ import annotations from motionblinds import DEVICE_TYPES_GATEWAY, DEVICE_TYPES_WIFI, MotionGateway from motionblinds.motion_blinds import MotionBlind +from homeassistant.core import CALLBACK_TYPE from homeassistant.helpers import device_registry 
as dr from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.event import async_call_later from homeassistant.helpers.update_coordinator import CoordinatorEntity from .const import ( @@ -15,6 +17,8 @@ from .const import ( DOMAIN, KEY_GATEWAY, MANUFACTURER, + UPDATE_INTERVAL_MOVING, + UPDATE_INTERVAL_MOVING_WIFI, ) from .coordinator import DataUpdateCoordinatorMotionBlinds from .gateway import device_name @@ -36,6 +40,14 @@ class MotionCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinatorMotionBlind self._blind = blind self._api_lock = coordinator.api_lock + self._requesting_position: CALLBACK_TYPE | None = None + self._previous_positions: list[int | dict | None] = [] + + if blind.device_type in DEVICE_TYPES_WIFI: + self._update_interval_moving = UPDATE_INTERVAL_MOVING_WIFI + else: + self._update_interval_moving = UPDATE_INTERVAL_MOVING + if blind.device_type in DEVICE_TYPES_GATEWAY: gateway = blind else: @@ -95,3 +107,44 @@ class MotionCoordinatorEntity(CoordinatorEntity[DataUpdateCoordinatorMotionBlind """Unsubscribe when removed.""" self._blind.Remove_callback(self.unique_id) await super().async_will_remove_from_hass() + + async def async_scheduled_update_request(self, *_) -> None: + """Request a state update from the blind at a scheduled point in time.""" + # add the last position to the list and keep the list at max 2 items + self._previous_positions.append(self._blind.position) + if len(self._previous_positions) > 2: + del self._previous_positions[: len(self._previous_positions) - 2] + + async with self._api_lock: + await self.hass.async_add_executor_job(self._blind.Update_trigger) + + self.coordinator.async_update_listeners() + + if len(self._previous_positions) < 2 or not all( + self._blind.position == prev_position + for prev_position in self._previous_positions + ): + # keep updating the position @self._update_interval_moving until the position does not change. 
+ self._requesting_position = async_call_later( + self.hass, + self._update_interval_moving, + self.async_scheduled_update_request, + ) + else: + self._previous_positions = [] + self._requesting_position = None + + async def async_request_position_till_stop(self, delay: int | None = None) -> None: + """Request the position of the blind every self._update_interval_moving seconds until it stops moving.""" + if delay is None: + delay = self._update_interval_moving + + self._previous_positions = [] + if self._blind.position is None: + return + if self._requesting_position is not None: + self._requesting_position() + + self._requesting_position = async_call_later( + self.hass, delay, self.async_scheduled_update_request + ) diff --git a/homeassistant/components/motion_blinds/icons.json b/homeassistant/components/motion_blinds/icons.json index a61c36e3f00..e50e50130f7 100644 --- a/homeassistant/components/motion_blinds/icons.json +++ b/homeassistant/components/motion_blinds/icons.json @@ -1,5 +1,17 @@ { + "entity": { + "button": { + "go_favorite": { + "default": "mdi:star" + }, + "set_favorite": { + "default": "mdi:star-cog" + } + } + }, "services": { - "set_absolute_position": "mdi:set-square" + "set_absolute_position": { + "service": "mdi:set-square" + } } } diff --git a/homeassistant/components/motion_blinds/manifest.json b/homeassistant/components/motion_blinds/manifest.json index 0f9241db7b4..e1e12cf6729 100644 --- a/homeassistant/components/motion_blinds/manifest.json +++ b/homeassistant/components/motion_blinds/manifest.json @@ -21,5 +21,5 @@ "documentation": "https://www.home-assistant.io/integrations/motion_blinds", "iot_class": "local_push", "loggers": ["motionblinds"], - "requirements": ["motionblinds==0.6.23"] + "requirements": ["motionblinds==0.6.24"] } diff --git a/homeassistant/components/motion_blinds/strings.json b/homeassistant/components/motion_blinds/strings.json index cb9468c3a27..ddbf928462a 100644 --- a/homeassistant/components/motion_blinds/strings.json +++ b/homeassistant/components/motion_blinds/strings.json @@ -62,6 +62,14 @@ } }, "entity": { + "button": { + "go_favorite": { + "name": "Go to favorite position" + }, + "set_favorite": { + "name": "Set current position as favorite" + } + }, "cover": { "top": { "name": "Top" diff --git a/homeassistant/components/motionblinds_ble/manifest.json b/homeassistant/components/motionblinds_ble/manifest.json index 454c873dfa2..d9968cfde4c 100644 --- a/homeassistant/components/motionblinds_ble/manifest.json +++ b/homeassistant/components/motionblinds_ble/manifest.json @@ -14,5 +14,5 @@ "integration_type": "device", "iot_class": "assumed_state", "loggers": ["motionblindsble"], - "requirements": ["motionblindsble==0.1.0"] + "requirements": ["motionblindsble==0.1.1"] } diff --git a/homeassistant/components/motionblinds_ble/sensor.py b/homeassistant/components/motionblinds_ble/sensor.py index fbab5d06251..aa0f5ef7c90 100644 --- a/homeassistant/components/motionblinds_ble/sensor.py +++ b/homeassistant/components/motionblinds_ble/sensor.py @@ -89,6 +89,7 @@ SENSORS: tuple[MotionblindsBLESensorEntityDescription, ...] 
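The entity.py hunk above moves the "poll until the blind stops moving" loop into the shared base class so the cover and the new favorite-position buttons reuse it; the core trick is re-scheduling the same callback with async_call_later until two consecutive reads agree. A stripped-down illustration of that re-scheduling idea, not the integration's exact code:

from homeassistant.core import CALLBACK_TYPE, HomeAssistant
from homeassistant.helpers.event import async_call_later


class PositionPoller:
    """Re-poll a device every few seconds until its position stops changing."""

    def __init__(self, hass: HomeAssistant, read_position, interval: float = 2.0) -> None:
        """read_position is a callable returning the current position."""
        self._hass = hass
        self._read = read_position
        self._interval = interval
        self._previous: list = []
        self._cancel: CALLBACK_TYPE | None = None

    async def _poll(self, _now) -> None:
        """Read once, then reschedule unless the last two readings match."""
        self._previous = (self._previous + [self._read()])[-2:]
        if len(self._previous) < 2 or self._previous[0] != self._previous[1]:
            self._cancel = async_call_later(self._hass, self._interval, self._poll)
        else:
            self._previous, self._cancel = [], None

    def start(self) -> None:
        """Kick off (or restart) the polling loop."""
        if self._cancel:
            self._cancel()
        self._previous = []
        self._cancel = async_call_later(self._hass, self._interval, self._poll)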
= ( native_unit_of_measurement=SIGNAL_STRENGTH_DECIBELS_MILLIWATT, register_callback_func=lambda device: device.register_signal_strength_callback, value_func=lambda value: value, + entity_registry_enabled_default=False, ), ) diff --git a/homeassistant/components/motioneye/camera.py b/homeassistant/components/motioneye/camera.py index da5eb36d494..d84f7b43c04 100644 --- a/homeassistant/components/motioneye/camera.py +++ b/homeassistant/components/motioneye/camera.py @@ -136,7 +136,7 @@ async def async_setup_entry( ) platform.async_register_entity_service( SERVICE_SNAPSHOT, - {}, + None, "async_request_snapshot", ) diff --git a/homeassistant/components/motioneye/icons.json b/homeassistant/components/motioneye/icons.json index b0a4ea8dcb1..7cc93d528e8 100644 --- a/homeassistant/components/motioneye/icons.json +++ b/homeassistant/components/motioneye/icons.json @@ -1,7 +1,13 @@ { "services": { - "set_text_overlay": "mdi:text-box-outline", - "action": "mdi:gesture-tap-button", - "snapshot": "mdi:camera" + "set_text_overlay": { + "service": "mdi:text-box-outline" + }, + "action": { + "service": "mdi:gesture-tap-button" + }, + "snapshot": { + "service": "mdi:camera" + } } } diff --git a/homeassistant/components/mpd/config_flow.py b/homeassistant/components/mpd/config_flow.py index 619fb8936e2..36777a205f9 100644 --- a/homeassistant/components/mpd/config_flow.py +++ b/homeassistant/components/mpd/config_flow.py @@ -32,7 +32,9 @@ class MPDConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a flow initiated by the user.""" errors = {} if user_input: - self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) + self._async_abort_entries_match( + {CONF_HOST: user_input[CONF_HOST], CONF_PORT: user_input[CONF_PORT]} + ) client = MPDClient() client.timeout = 30 client.idletimeout = 10 @@ -65,19 +67,17 @@ class MPDConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Attempt to import the existing configuration.""" - self._async_abort_entries_match({CONF_HOST: import_config[CONF_HOST]}) + self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) client = MPDClient() client.timeout = 30 client.idletimeout = 10 try: async with timeout(35): - await client.connect(import_config[CONF_HOST], import_config[CONF_PORT]) - if CONF_PASSWORD in import_config: - await client.password(import_config[CONF_PASSWORD]) + await client.connect(import_data[CONF_HOST], import_data[CONF_PORT]) + if CONF_PASSWORD in import_data: + await client.password(import_data[CONF_PASSWORD]) with suppress(mpd.ConnectionError): client.disconnect() except ( @@ -92,10 +92,10 @@ class MPDConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason="unknown") return self.async_create_entry( - title=import_config.get(CONF_NAME, "Music Player Daemon"), + title=import_data.get(CONF_NAME, "Music Player Daemon"), data={ - CONF_HOST: import_config[CONF_HOST], - CONF_PORT: import_config[CONF_PORT], - CONF_PASSWORD: import_config.get(CONF_PASSWORD), + CONF_HOST: import_data[CONF_HOST], + CONF_PORT: import_data[CONF_PORT], + CONF_PASSWORD: import_data.get(CONF_PASSWORD), }, ) diff --git a/homeassistant/components/mqtt/__init__.py b/homeassistant/components/mqtt/__init__.py index 5f7f1b1d330..86eeca2017c 100644 --- a/homeassistant/components/mqtt/__init__.py +++ b/homeassistant/components/mqtt/__init__.py @@ -89,7 +89,7 @@ from .models import ( # 
noqa: F401 PayloadSentinel, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, + convert_outgoing_mqtt_payload, ) from .subscription import ( # noqa: F401 EntitySubscription, @@ -115,6 +115,7 @@ SERVICE_DUMP = "dump" ATTR_TOPIC_TEMPLATE = "topic_template" ATTR_PAYLOAD_TEMPLATE = "payload_template" +ATTR_EVALUATE_PAYLOAD = "evaluate_payload" MAX_RECONNECT_WAIT = 300 # seconds @@ -166,6 +167,7 @@ MQTT_PUBLISH_SCHEMA = vol.All( vol.Exclusive(ATTR_TOPIC_TEMPLATE, CONF_TOPIC): cv.string, vol.Exclusive(ATTR_PAYLOAD, CONF_PAYLOAD): cv.string, vol.Exclusive(ATTR_PAYLOAD_TEMPLATE, CONF_PAYLOAD): cv.string, + vol.Optional(ATTR_EVALUATE_PAYLOAD): cv.boolean, vol.Optional(ATTR_QOS, default=DEFAULT_QOS): valid_qos_schema, vol.Optional(ATTR_RETAIN, default=DEFAULT_RETAIN): cv.boolean, }, @@ -295,6 +297,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: msg_topic: str | None = call.data.get(ATTR_TOPIC) msg_topic_template: str | None = call.data.get(ATTR_TOPIC_TEMPLATE) payload: PublishPayloadType = call.data.get(ATTR_PAYLOAD) + evaluate_payload: bool = call.data.get(ATTR_EVALUATE_PAYLOAD, False) payload_template: str | None = call.data.get(ATTR_PAYLOAD_TEMPLATE) qos: int = call.data[ATTR_QOS] retain: bool = call.data[ATTR_RETAIN] @@ -303,8 +306,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: # has been deprecated with HA Core 2024.8.0 # and will be removed with HA Core 2025.2.0 rendered_topic: Any = MqttCommandTemplate( - template.Template(msg_topic_template), - hass=hass, + template.Template(msg_topic_template, hass), ).async_render() ir.async_create_issue( hass, @@ -353,8 +355,11 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: }, ) payload = MqttCommandTemplate( - template.Template(payload_template), hass=hass + template.Template(payload_template, hass) ).async_render() + elif evaluate_payload: + # Convert quoted binary literal to raw data + payload = convert_outgoing_mqtt_payload(payload) if TYPE_CHECKING: assert msg_topic is not None diff --git a/homeassistant/components/mqtt/abbreviations.py b/homeassistant/components/mqtt/abbreviations.py index c3efe5667ad..f4a32bbdf9d 100644 --- a/homeassistant/components/mqtt/abbreviations.py +++ b/homeassistant/components/mqtt/abbreviations.py @@ -267,6 +267,7 @@ DEVICE_ABBREVIATIONS = { "name": "name", "mf": "manufacturer", "mdl": "model", + "mdl_id": "model_id", "hw": "hw_version", "sw": "sw_version", "sa": "suggested_area", diff --git a/homeassistant/components/mqtt/addon.py b/homeassistant/components/mqtt/addon.py new file mode 100644 index 00000000000..3ac6748033f --- /dev/null +++ b/homeassistant/components/mqtt/addon.py @@ -0,0 +1,22 @@ +"""Provide MQTT add-on management. + +Currently only supports the official mosquitto add-on. 
+""" + +from __future__ import annotations + +from homeassistant.components.hassio import AddonManager +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.singleton import singleton + +from .const import DOMAIN, LOGGER + +ADDON_SLUG = "core_mosquitto" +DATA_ADDON_MANAGER = f"{DOMAIN}_addon_manager" + + +@singleton(DATA_ADDON_MANAGER) +@callback +def get_addon_manager(hass: HomeAssistant) -> AddonManager: + """Get the add-on manager.""" + return AddonManager(hass, LOGGER, "Mosquitto Mqtt Broker", ADDON_SLUG) diff --git a/homeassistant/components/mqtt/client.py b/homeassistant/components/mqtt/client.py index 6762f440c5a..4fa8b7db02a 100644 --- a/homeassistant/components/mqtt/client.py +++ b/homeassistant/components/mqtt/client.py @@ -111,6 +111,7 @@ UNSUBSCRIBE_COOLDOWN = 0.1 TIMEOUT_ACK = 10 RECONNECT_INTERVAL_SECONDS = 10 +MAX_WILDCARD_SUBSCRIBES_PER_CALL = 1 MAX_SUBSCRIBES_PER_CALL = 500 MAX_UNSUBSCRIBES_PER_CALL = 500 @@ -893,14 +894,27 @@ class MQTT: if not self._pending_subscriptions: return - subscriptions: dict[str, int] = self._pending_subscriptions + # Split out the wildcard subscriptions, we subscribe to them one by one + pending_subscriptions: dict[str, int] = self._pending_subscriptions + pending_wildcard_subscriptions = { + subscription.topic: pending_subscriptions.pop(subscription.topic) + for subscription in self._wildcard_subscriptions + if subscription.topic in pending_subscriptions + } + self._pending_subscriptions = {} - subscription_list = list(subscriptions.items()) debug_enabled = _LOGGER.isEnabledFor(logging.DEBUG) - for chunk in chunked_or_all(subscription_list, MAX_SUBSCRIBES_PER_CALL): + for chunk in chain( + chunked_or_all( + pending_wildcard_subscriptions.items(), MAX_WILDCARD_SUBSCRIBES_PER_CALL + ), + chunked_or_all(pending_subscriptions.items(), MAX_SUBSCRIBES_PER_CALL), + ): chunk_list = list(chunk) + if not chunk_list: + continue result, mid = self._mqttc.subscribe(chunk_list) diff --git a/homeassistant/components/mqtt/climate.py b/homeassistant/components/mqtt/climate.py index 7873b056889..426bac8e9ca 100644 --- a/homeassistant/components/mqtt/climate.py +++ b/homeassistant/components/mqtt/climate.py @@ -46,6 +46,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.util.unit_conversion import TemperatureConverter @@ -84,7 +85,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic diff --git a/homeassistant/components/mqtt/config_flow.py b/homeassistant/components/mqtt/config_flow.py index 17dfc6512b3..ca799ff3653 100644 --- a/homeassistant/components/mqtt/config_flow.py +++ b/homeassistant/components/mqtt/config_flow.py @@ -2,8 +2,10 @@ from __future__ import annotations +import asyncio from collections import OrderedDict from collections.abc import Callable, Mapping +import logging import queue from ssl import PROTOCOL_TLS_CLIENT, SSLContext, SSLError from types import MappingProxyType @@ -14,7 +16,12 @@ from cryptography.x509 import load_pem_x509_certificate import voluptuous as vol from 
homeassistant.components.file_upload import process_uploaded_file -from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.components.hassio import HassioServiceInfo, is_hassio +from homeassistant.components.hassio.addon_manager import ( + AddonError, + AddonManager, + AddonState, +) from homeassistant.config_entries import ( ConfigEntry, ConfigFlow, @@ -32,6 +39,7 @@ from homeassistant.const import ( CONF_USERNAME, ) from homeassistant.core import callback +from homeassistant.data_entry_flow import AbortFlow from homeassistant.helpers import config_validation as cv from homeassistant.helpers.json import json_dumps from homeassistant.helpers.selector import ( @@ -51,6 +59,7 @@ from homeassistant.helpers.selector import ( ) from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads +from .addon import get_addon_manager from .client import MqttClientSetup from .const import ( ATTR_PAYLOAD, @@ -91,6 +100,11 @@ from .util import ( valid_publish_topic, ) +_LOGGER = logging.getLogger(__name__) + +ADDON_SETUP_TIMEOUT = 5 +ADDON_SETUP_TIMEOUT_ROUNDS = 5 + MQTT_TIMEOUT = 5 ADVANCED_OPTIONS = "advanced_options" @@ -197,6 +211,12 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): entry: ConfigEntry | None _hassio_discovery: dict[str, Any] | None = None + _addon_manager: AddonManager + + def __init__(self) -> None: + """Set up flow instance.""" + self.install_task: asyncio.Task | None = None + self.start_task: asyncio.Task | None = None @staticmethod @callback @@ -206,6 +226,118 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return MQTTOptionsFlowHandler(config_entry) + async def _async_install_addon(self) -> None: + """Install the Mosquitto Mqtt broker add-on.""" + addon_manager: AddonManager = get_addon_manager(self.hass) + await addon_manager.async_schedule_install_addon() + + async def async_step_install_failed( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Add-on installation failed.""" + return self.async_abort( + reason="addon_install_failed", + description_placeholders={"addon": self._addon_manager.addon_name}, + ) + + async def async_step_install_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Install Mosquitto Broker add-on.""" + if self.install_task is None: + self.install_task = self.hass.async_create_task(self._async_install_addon()) + + if not self.install_task.done(): + return self.async_show_progress( + step_id="install_addon", + progress_action="install_addon", + progress_task=self.install_task, + ) + + try: + await self.install_task + except AddonError as err: + _LOGGER.error(err) + return self.async_show_progress_done(next_step_id="install_failed") + finally: + self.install_task = None + + return self.async_show_progress_done(next_step_id="start_addon") + + async def async_step_start_failed( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Add-on start failed.""" + return self.async_abort( + reason="addon_start_failed", + description_placeholders={"addon": self._addon_manager.addon_name}, + ) + + async def async_step_start_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Start Mosquitto Broker add-on.""" + if not self.start_task: + self.start_task = self.hass.async_create_task(self._async_start_addon()) + if not self.start_task.done(): + return self.async_show_progress( + step_id="start_addon", + progress_action="start_addon", + progress_task=self.start_task, + ) + 
try: + await self.start_task + except AddonError as err: + _LOGGER.error(err) + return self.async_show_progress_done(next_step_id="start_failed") + finally: + self.start_task = None + + return self.async_show_progress_done(next_step_id="setup_entry_from_discovery") + + async def _async_get_config_and_try(self) -> dict[str, Any] | None: + """Get the MQTT add-on discovery info and try the connection.""" + if self._hassio_discovery is not None: + return self._hassio_discovery + addon_manager: AddonManager = get_addon_manager(self.hass) + try: + addon_discovery_config = ( + await addon_manager.async_get_addon_discovery_info() + ) + config: dict[str, Any] = { + CONF_BROKER: addon_discovery_config[CONF_HOST], + CONF_PORT: addon_discovery_config[CONF_PORT], + CONF_USERNAME: addon_discovery_config.get(CONF_USERNAME), + CONF_PASSWORD: addon_discovery_config.get(CONF_PASSWORD), + CONF_DISCOVERY: DEFAULT_DISCOVERY, + } + except AddonError: + # We do not have discovery information yet + return None + if await self.hass.async_add_executor_job( + try_connection, + config, + ): + self._hassio_discovery = config + return config + return None + + async def _async_start_addon(self) -> None: + """Start the Mosquitto Broker add-on.""" + addon_manager: AddonManager = get_addon_manager(self.hass) + await addon_manager.async_schedule_start_addon() + + # Sleep some seconds to let the add-on start properly before connecting. + for _ in range(ADDON_SETUP_TIMEOUT_ROUNDS): + await asyncio.sleep(ADDON_SETUP_TIMEOUT) + # Finish setup using discovery info to test the connection + if await self._async_get_config_and_try(): + break + else: + raise AddonError( + f"Failed to correctly start {addon_manager.addon_name} add-on" + ) + async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: @@ -213,13 +345,92 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") + if is_hassio(self.hass): + # Offer to set up broker add-on if supervisor is available + self._addon_manager = get_addon_manager(self.hass) + return self.async_show_menu( + step_id="user", + menu_options=["addon", "broker"], + description_placeholders={"addon": self._addon_manager.addon_name}, + ) + + # Start up a flow for manual setup return await self.async_step_broker() + async def async_step_setup_entry_from_discovery( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Set up mqtt entry from discovery info.""" + if (config := await self._async_get_config_and_try()) is not None: + return self.async_create_entry( + title=self._addon_manager.addon_name, + data=config, + ) + + raise AbortFlow( + "addon_connection_failed", + description_placeholders={"addon": self._addon_manager.addon_name}, + ) + + async def async_step_addon( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Install and start MQTT broker add-on.""" + addon_manager = self._addon_manager + + try: + addon_info = await addon_manager.async_get_addon_info() + except AddonError as err: + raise AbortFlow( + "addon_info_failed", + description_placeholders={"addon": self._addon_manager.addon_name}, + ) from err + + if addon_info.state == AddonState.RUNNING: + # Finish setup using discovery info + return await self.async_step_setup_entry_from_discovery() + + if addon_info.state == AddonState.NOT_RUNNING: + return await self.async_step_start_addon() + + # Install the add-on and start it + return await self.async_step_install_addon() 
+ async def async_step_reauth( self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-authentication with MQTT broker.""" self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) + if is_hassio(self.hass): + # Check if entry setup matches the add-on discovery config + addon_manager = get_addon_manager(self.hass) + try: + addon_discovery_config = ( + await addon_manager.async_get_addon_discovery_info() + ) + except AddonError: + # Follow manual flow if we have an error + pass + else: + # Check if the addon secrets need to be renewed. + # This will repair the config entry, + # in case the official Mosquitto Broker addon was re-installed. + if ( + entry_data[CONF_BROKER] == addon_discovery_config[CONF_HOST] + and entry_data[CONF_PORT] == addon_discovery_config[CONF_PORT] + and entry_data.get(CONF_USERNAME) + == (username := addon_discovery_config.get(CONF_USERNAME)) + and entry_data.get(CONF_PASSWORD) + != (password := addon_discovery_config.get(CONF_PASSWORD)) + ): + _LOGGER.info( + "Executing autorecovery %s add-on secrets", + addon_manager.addon_name, + ) + return await self.async_step_reauth_confirm( + user_input={CONF_USERNAME: username, CONF_PASSWORD: password} + ) + return await self.async_step_reauth_confirm() async def async_step_reauth_confirm( @@ -293,7 +504,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): async def async_step_hassio( self, discovery_info: HassioServiceInfo ) -> ConfigFlowResult: - """Receive a Hass.io discovery.""" + """Receive a Hass.io discovery or process setup after addon install.""" await self._async_handle_discovery_without_unique_id() self._hassio_discovery = discovery_info.config diff --git a/homeassistant/components/mqtt/const.py b/homeassistant/components/mqtt/const.py index 9a8e6ae22df..1e1011cc381 100644 --- a/homeassistant/components/mqtt/const.py +++ b/homeassistant/components/mqtt/const.py @@ -1,5 +1,7 @@ """Constants used by multiple MQTT modules.""" +import logging + import jinja2 from homeassistant.const import CONF_PAYLOAD, Platform @@ -37,6 +39,7 @@ CONF_ENCODING = "encoding" CONF_JSON_ATTRS_TOPIC = "json_attributes_topic" CONF_JSON_ATTRS_TEMPLATE = "json_attributes_template" CONF_KEEPALIVE = "keepalive" +CONF_OPTIONS = "options" CONF_ORIGIN = "origin" CONF_QOS = ATTR_QOS CONF_RETAIN = ATTR_RETAIN @@ -148,6 +151,7 @@ DEFAULT_WILL = { } DOMAIN = "mqtt" +LOGGER = logging.getLogger(__package__) MQTT_CONNECTION_STATE = "mqtt_connection_state" diff --git a/homeassistant/components/mqtt/device_tracker.py b/homeassistant/components/mqtt/device_tracker.py index b2aeb4c0fc1..57614106d4e 100644 --- a/homeassistant/components/mqtt/device_tracker.py +++ b/homeassistant/components/mqtt/device_tracker.py @@ -27,13 +27,14 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . 
import subscription from .config import MQTT_BASE_SCHEMA from .const import CONF_PAYLOAD_RESET, CONF_STATE_TOPIC from .mixins import CONF_JSON_ATTRS_TOPIC, MqttEntity, async_setup_entity_entry_helper -from .models import MqttValueTemplate, ReceiveMessage, ReceivePayloadType +from .models import MqttValueTemplate, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_subscribe_topic diff --git a/homeassistant/components/mqtt/discovery.py b/homeassistant/components/mqtt/discovery.py index cf2941a3665..8e379633674 100644 --- a/homeassistant/components/mqtt/discovery.py +++ b/homeassistant/components/mqtt/discovery.py @@ -5,6 +5,7 @@ from __future__ import annotations import asyncio from collections import deque import functools +from itertools import chain import logging import re import time @@ -238,10 +239,6 @@ async def async_start( # noqa: C901 component, node_id, object_id = match.groups() - if component not in SUPPORTED_COMPONENTS: - _LOGGER.warning("Integration %s is not supported", component) - return - if payload: try: discovery_payload = MQTTDiscoveryPayload(json_loads_object(payload)) @@ -351,9 +348,15 @@ async def async_start( # noqa: C901 0, job_type=HassJobType.Callback, ) - for topic in ( - f"{discovery_topic}/+/+/config", - f"{discovery_topic}/+/+/+/config", + for topic in chain( + ( + f"{discovery_topic}/{component}/+/config" + for component in SUPPORTED_COMPONENTS + ), + ( + f"{discovery_topic}/{component}/+/+/config" + for component in SUPPORTED_COMPONENTS + ), ) ] diff --git a/homeassistant/components/mqtt/event.py b/homeassistant/components/mqtt/event.py index 5e801fda54b..0dc267f80f9 100644 --- a/homeassistant/components/mqtt/event.py +++ b/homeassistant/components/mqtt/event.py @@ -19,6 +19,7 @@ from homeassistant.const import CONF_DEVICE_CLASS, CONF_NAME, CONF_VALUE_TEMPLAT from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads_object @@ -32,7 +33,6 @@ from .models import ( MqttValueTemplateException, PayloadSentinel, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/fan.py b/homeassistant/components/mqtt/fan.py index 1838ce20e4d..a22dba4ae93 100644 --- a/homeassistant/components/mqtt/fan.py +++ b/homeassistant/components/mqtt/fan.py @@ -29,6 +29,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.util.percentage import ( @@ -52,7 +53,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic diff --git a/homeassistant/components/mqtt/humidifier.py b/homeassistant/components/mqtt/humidifier.py index a4510ee5951..d55c1d3cebf 100644 --- a/homeassistant/components/mqtt/humidifier.py +++ 
b/homeassistant/components/mqtt/humidifier.py @@ -32,6 +32,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, VolSchemaType @@ -54,7 +55,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic diff --git a/homeassistant/components/mqtt/icons.json b/homeassistant/components/mqtt/icons.json index 1979359c5a1..73cbf22b629 100644 --- a/homeassistant/components/mqtt/icons.json +++ b/homeassistant/components/mqtt/icons.json @@ -1,7 +1,13 @@ { "services": { - "publish": "mdi:publish", - "dump": "mdi:database-export", - "reload": "mdi:reload" + "publish": { + "service": "mdi:publish" + }, + "dump": { + "service": "mdi:database-export" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/mqtt/lawn_mower.py b/homeassistant/components/mqtt/lawn_mower.py index a74d278401c..f4aa248929e 100644 --- a/homeassistant/components/mqtt/lawn_mower.py +++ b/homeassistant/components/mqtt/lawn_mower.py @@ -20,6 +20,7 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . 
import subscription @@ -31,7 +32,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic, valid_subscribe_topic diff --git a/homeassistant/components/mqtt/light/schema_basic.py b/homeassistant/components/mqtt/light/schema_basic.py index b0ffae4e328..1a64b1eecb4 100644 --- a/homeassistant/components/mqtt/light/schema_basic.py +++ b/homeassistant/components/mqtt/light/schema_basic.py @@ -39,6 +39,7 @@ from homeassistant.const import ( from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType import homeassistant.util.color as color_util @@ -57,7 +58,6 @@ from ..models import ( PayloadSentinel, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, TemplateVarsType, ) from ..schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/light/schema_template.py b/homeassistant/components/mqtt/light/schema_template.py index c35b0e6ced9..a1f4ea2e81a 100644 --- a/homeassistant/components/mqtt/light/schema_template.py +++ b/homeassistant/components/mqtt/light/schema_template.py @@ -31,6 +31,7 @@ from homeassistant.const import ( from homeassistant.core import callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, TemplateVarsType, VolSchemaType import homeassistant.util.color as color_util @@ -43,7 +44,6 @@ from ..models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from ..schemas import MQTT_ENTITY_COMMON_SCHEMA from .schema import MQTT_LIGHT_SCHEMA_SCHEMA diff --git a/homeassistant/components/mqtt/lock.py b/homeassistant/components/mqtt/lock.py index 22b0e24b3c6..c72dcd8dc21 100644 --- a/homeassistant/components/mqtt/lock.py +++ b/homeassistant/components/mqtt/lock.py @@ -21,6 +21,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, TemplateVarsType from . 
import subscription @@ -39,7 +40,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/mixins.py b/homeassistant/components/mqtt/mixins.py index aca88f2cb97..ce811e13a24 100644 --- a/homeassistant/components/mqtt/mixins.py +++ b/homeassistant/components/mqtt/mixins.py @@ -16,6 +16,7 @@ from homeassistant.const import ( ATTR_HW_VERSION, ATTR_MANUFACTURER, ATTR_MODEL, + ATTR_MODEL_ID, ATTR_NAME, ATTR_SERIAL_NUMBER, ATTR_SUGGESTED_AREA, @@ -25,6 +26,7 @@ from homeassistant.const import ( CONF_ENTITY_CATEGORY, CONF_ICON, CONF_MODEL, + CONF_MODEL_ID, CONF_NAME, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, @@ -992,6 +994,9 @@ def device_info_from_specifications( if CONF_MODEL in specifications: info[ATTR_MODEL] = specifications[CONF_MODEL] + if CONF_MODEL_ID in specifications: + info[ATTR_MODEL_ID] = specifications[CONF_MODEL_ID] + if CONF_NAME in specifications: info[ATTR_NAME] = specifications[CONF_NAME] diff --git a/homeassistant/components/mqtt/models.py b/homeassistant/components/mqtt/models.py index c355510a5c2..f7abbc29464 100644 --- a/homeassistant/components/mqtt/models.py +++ b/homeassistant/components/mqtt/models.py @@ -12,7 +12,7 @@ import logging from typing import TYPE_CHECKING, Any, TypedDict from homeassistant.const import ATTR_ENTITY_ID, ATTR_NAME, Platform -from homeassistant.core import CALLBACK_TYPE, HomeAssistant, callback +from homeassistant.core import CALLBACK_TYPE, callback from homeassistant.exceptions import ServiceValidationError, TemplateError from homeassistant.helpers import template from homeassistant.helpers.entity import Entity @@ -51,6 +51,22 @@ ATTR_THIS = "this" type PublishPayloadType = str | bytes | int | float | None +def convert_outgoing_mqtt_payload( + payload: PublishPayloadType, +) -> PublishPayloadType: + """Ensure correct raw MQTT payload is passed as bytes for publishing.""" + if isinstance(payload, str) and payload.startswith(("b'", 'b"')): + try: + native_object = literal_eval(payload) + except (ValueError, TypeError, SyntaxError, MemoryError): + pass + else: + if isinstance(native_object, bytes): + return native_object + + return payload + + @dataclass class PublishMessage: """MQTT Message for publishing.""" @@ -159,22 +175,13 @@ class MqttCommandTemplate: self, command_template: template.Template | None, *, - hass: HomeAssistant | None = None, entity: Entity | None = None, ) -> None: """Instantiate a command template.""" self._template_state: template.TemplateStateFromEntityId | None = None self._command_template = command_template - if command_template is None: - return - self._entity = entity - command_template.hass = hass - - if entity: - command_template.hass = entity.hass - @callback def async_render( self, @@ -182,22 +189,6 @@ class MqttCommandTemplate: variables: TemplateVarsType = None, ) -> PublishPayloadType: """Render or convert the command template with given value or variables.""" - - def _convert_outgoing_payload( - payload: PublishPayloadType, - ) -> PublishPayloadType: - """Ensure correct raw MQTT payload is passed as bytes for publishing.""" - if isinstance(payload, str): - try: - native_object = literal_eval(payload) - if isinstance(native_object, bytes): - return native_object - - except (ValueError, TypeError, SyntaxError, MemoryError): - pass - - return payload - if self._command_template is None: return value @@ -219,7 +210,7 @@ class MqttCommandTemplate: self._command_template, ) try: - 
return _convert_outgoing_payload( + return convert_outgoing_mqtt_payload( self._command_template.async_render(values, parse_result=False) ) except TemplateError as exc: @@ -270,7 +261,6 @@ class MqttValueTemplate: self, value_template: template.Template | None, *, - hass: HomeAssistant | None = None, entity: Entity | None = None, config_attributes: TemplateVarsType = None, ) -> None: @@ -278,15 +268,8 @@ class MqttValueTemplate: self._template_state: template.TemplateStateFromEntityId | None = None self._value_template = value_template self._config_attributes = config_attributes - if value_template is None: - return - - value_template.hass = hass self._entity = entity - if entity: - value_template.hass = entity.hass - @callback def async_render_with_possible_json_value( self, diff --git a/homeassistant/components/mqtt/number.py b/homeassistant/components/mqtt/number.py index e8f2cf0cfe4..ce441a2de6e 100644 --- a/homeassistant/components/mqtt/number.py +++ b/homeassistant/components/mqtt/number.py @@ -28,6 +28,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . import subscription @@ -44,7 +45,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/schemas.py b/homeassistant/components/mqtt/schemas.py index bbc0194a1a5..67c6b447709 100644 --- a/homeassistant/components/mqtt/schemas.py +++ b/homeassistant/components/mqtt/schemas.py @@ -9,6 +9,7 @@ from homeassistant.const import ( CONF_ENTITY_CATEGORY, CONF_ICON, CONF_MODEL, + CONF_MODEL_ID, CONF_NAME, CONF_UNIQUE_ID, CONF_VALUE_TEMPLATE, @@ -112,6 +113,7 @@ MQTT_ENTITY_DEVICE_INFO_SCHEMA = vol.All( ), vol.Optional(CONF_MANUFACTURER): cv.string, vol.Optional(CONF_MODEL): cv.string, + vol.Optional(CONF_MODEL_ID): cv.string, vol.Optional(CONF_NAME): cv.string, vol.Optional(CONF_HW_VERSION): cv.string, vol.Optional(CONF_SERIAL_NUMBER): cv.string, diff --git a/homeassistant/components/mqtt/select.py b/homeassistant/components/mqtt/select.py index 5cc7a586c71..5f9c4a11c23 100644 --- a/homeassistant/components/mqtt/select.py +++ b/homeassistant/components/mqtt/select.py @@ -15,25 +15,28 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.restore_state import RestoreEntity +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . 
import subscription from .config import MQTT_RW_SCHEMA -from .const import CONF_COMMAND_TEMPLATE, CONF_COMMAND_TOPIC, CONF_STATE_TOPIC +from .const import ( + CONF_COMMAND_TEMPLATE, + CONF_COMMAND_TOPIC, + CONF_OPTIONS, + CONF_STATE_TOPIC, +) from .mixins import MqttEntity, async_setup_entity_entry_helper from .models import ( MqttCommandTemplate, MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA _LOGGER = logging.getLogger(__name__) -CONF_OPTIONS = "options" - DEFAULT_NAME = "MQTT Select" MQTT_SELECT_ATTRIBUTES_BLOCKED = frozenset( diff --git a/homeassistant/components/mqtt/sensor.py b/homeassistant/components/mqtt/sensor.py index 4a41f486831..fc95807b8a5 100644 --- a/homeassistant/components/mqtt/sensor.py +++ b/homeassistant/components/mqtt/sensor.py @@ -33,19 +33,15 @@ from homeassistant.core import CALLBACK_TYPE, HomeAssistant, State, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.event import async_call_later +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from homeassistant.util import dt as dt_util from . import subscription from .config import MQTT_RO_SCHEMA -from .const import CONF_STATE_TOPIC, PAYLOAD_NONE +from .const import CONF_OPTIONS, CONF_STATE_TOPIC, PAYLOAD_NONE from .mixins import MqttAvailabilityMixin, MqttEntity, async_setup_entity_entry_helper -from .models import ( - MqttValueTemplate, - PayloadSentinel, - ReceiveMessage, - ReceivePayloadType, -) +from .models import MqttValueTemplate, PayloadSentinel, ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import check_state_too_long @@ -72,6 +68,7 @@ _PLATFORM_SCHEMA_BASE = MQTT_RO_SCHEMA.extend( vol.Optional(CONF_FORCE_UPDATE, default=DEFAULT_FORCE_UPDATE): cv.boolean, vol.Optional(CONF_LAST_RESET_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_NAME): vol.Any(cv.string, None), + vol.Optional(CONF_OPTIONS): cv.ensure_list, vol.Optional(CONF_SUGGESTED_DISPLAY_PRECISION): cv.positive_int, vol.Optional(CONF_STATE_CLASS): vol.Any(STATE_CLASSES_SCHEMA, None), vol.Optional(CONF_UNIT_OF_MEASUREMENT): vol.Any(cv.string, None), @@ -79,8 +76,8 @@ _PLATFORM_SCHEMA_BASE = MQTT_RO_SCHEMA.extend( ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) -def validate_sensor_state_class_config(config: ConfigType) -> ConfigType: - """Validate the sensor state class config.""" +def validate_sensor_state_and_device_class_config(config: ConfigType) -> ConfigType: + """Validate the sensor options, state and device class config.""" if ( CONF_LAST_RESET_VALUE_TEMPLATE in config and (state_class := config.get(CONF_STATE_CLASS)) != SensorStateClass.TOTAL @@ -90,17 +87,35 @@ def validate_sensor_state_class_config(config: ConfigType) -> ConfigType: f"together with state class `{state_class}`" ) + # Only allow `options` to be set for `enum` sensors + # to limit the possible sensor values + if (options := config.get(CONF_OPTIONS)) is not None: + if not options: + raise vol.Invalid("An empty options list is not allowed") + if config.get(CONF_STATE_CLASS) or config.get(CONF_UNIT_OF_MEASUREMENT): + raise vol.Invalid( + f"Specifying `{CONF_OPTIONS}` is not allowed together with " + f"the `{CONF_STATE_CLASS}` or `{CONF_UNIT_OF_MEASUREMENT}` option" + ) + + if (device_class := config.get(CONF_DEVICE_CLASS)) != SensorDeviceClass.ENUM: + raise vol.Invalid( + f"The option 
`{CONF_OPTIONS}` can only be used " + f"together with device class `{SensorDeviceClass.ENUM}`, " + f"got `{CONF_DEVICE_CLASS}` '{device_class}'" + ) + return config PLATFORM_SCHEMA_MODERN = vol.All( _PLATFORM_SCHEMA_BASE, - validate_sensor_state_class_config, + validate_sensor_state_and_device_class_config, ) DISCOVERY_SCHEMA = vol.All( _PLATFORM_SCHEMA_BASE.extend({}, extra=vol.REMOVE_EXTRA), - validate_sensor_state_class_config, + validate_sensor_state_and_device_class_config, ) @@ -197,6 +212,7 @@ class MqttSensor(MqttEntity, RestoreSensor): CONF_SUGGESTED_DISPLAY_PRECISION ) self._attr_native_unit_of_measurement = config.get(CONF_UNIT_OF_MEASUREMENT) + self._attr_options = config.get(CONF_OPTIONS) self._attr_state_class = config.get(CONF_STATE_CLASS) self._expire_after = config.get(CONF_EXPIRE_AFTER) @@ -252,6 +268,15 @@ class MqttSensor(MqttEntity, RestoreSensor): else: self._attr_native_value = payload return + if self.options and payload not in self.options: + _LOGGER.warning( + "Ignoring invalid option received on topic '%s', got '%s', allowed: %s", + msg.topic, + payload, + ", ".join(self.options), + ) + return + if self.device_class in { None, SensorDeviceClass.ENUM, @@ -260,7 +285,7 @@ class MqttSensor(MqttEntity, RestoreSensor): return try: if (payload_datetime := dt_util.parse_datetime(payload)) is None: - raise ValueError + raise ValueError # noqa: TRY301 except ValueError: _LOGGER.warning("Invalid state message '%s' from '%s'", payload, msg.topic) self._attr_native_value = None @@ -280,7 +305,7 @@ class MqttSensor(MqttEntity, RestoreSensor): try: last_reset = dt_util.parse_datetime(str(payload)) if last_reset is None: - raise ValueError + raise ValueError # noqa: TRY301 self._attr_last_reset = last_reset except ValueError: _LOGGER.warning( diff --git a/homeassistant/components/mqtt/services.yaml b/homeassistant/components/mqtt/services.yaml index ee5e4ff56e8..c5e4f372bd6 100644 --- a/homeassistant/components/mqtt/services.yaml +++ b/homeassistant/components/mqtt/services.yaml @@ -12,6 +12,11 @@ publish: example: "The temperature is {{ states('sensor.temperature') }}" selector: template: + evaluate_payload: + advanced: true + default: false + selector: + boolean: qos: advanced: true default: 0 diff --git a/homeassistant/components/mqtt/siren.py b/homeassistant/components/mqtt/siren.py index 9f1466dd95d..e7cf9e270bd 100644 --- a/homeassistant/components/mqtt/siren.py +++ b/homeassistant/components/mqtt/siren.py @@ -31,6 +31,7 @@ from homeassistant.core import HomeAssistant, callback import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.json import json_dumps +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType, TemplateVarsType, VolSchemaType from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads_object @@ -51,7 +52,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA diff --git a/homeassistant/components/mqtt/strings.json b/homeassistant/components/mqtt/strings.json index 93131376154..75855f6d9f3 100644 --- a/homeassistant/components/mqtt/strings.json +++ b/homeassistant/components/mqtt/strings.json @@ -23,6 +23,13 @@ }, "config": { "step": { + "user": { + "description": "Please choose how you want to connect to the MQTT broker:", + 
"menu_options": { + "addon": "Use the official {addon} add-on.", + "broker": "Manually enter the MQTT broker connection details" + } + }, "broker": { "description": "Please enter the connection information of your MQTT broker.", "data": { @@ -63,15 +70,15 @@ "ws_path": "The WebSocket path to be used for the connection to your MQTT broker." } }, + "install_addon": { + "title": "Installing add-on" + }, + "start_addon": { + "title": "Starting add-on" + }, "hassio_confirm": { "title": "MQTT Broker via Home Assistant add-on", - "description": "Do you want to configure Home Assistant to connect to the MQTT broker provided by the add-on {addon}?", - "data": { - "discovery": "Enable discovery" - }, - "data_description": { - "discovery": "Option to enable MQTT automatic discovery." - } + "description": "Do you want to configure Home Assistant to connect to the MQTT broker provided by the add-on {addon}?" }, "reauth_confirm": { "title": "Re-authentication required with the MQTT broker", @@ -87,6 +94,10 @@ } }, "abort": { + "addon_info_failed": "Failed get info for the {addon} add-on.", + "addon_install_failed": "Failed to install the {addon} add-on.", + "addon_start_failed": "Failed to start the {addon} add-on.", + "addon_connection_failed": "Failed to connect to the {addon} add-on. Check the add-on status and try again later.", "already_configured": "[%key:common::config_flow::abort::already_configured_service%]", "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", "single_instance_allowed": "[%key:common::config_flow::abort::single_instance_allowed%]" @@ -174,7 +185,7 @@ "title": "MQTT options", "description": "Discovery - If discovery is enabled (recommended), Home Assistant will automatically discover devices and entities which publish their configuration on the MQTT broker. If discovery is disabled, all configuration must be done manually.\nDiscovery prefix - The prefix a configuration topic for automatic discovery must start with.\nBirth message - The birth message will be sent each time Home Assistant (re)connects to the MQTT broker.\nWill message - The will message will be sent each time Home Assistant loses its connection to the broker, both in case of a clean (e.g. Home Assistant shutting down) and in case of an unclean (e.g. Home Assistant crashing or losing its network connection) disconnect.", "data": { - "discovery": "[%key:component::mqtt::config::step::hassio_confirm::data::discovery%]", + "discovery": "Enable discovery", "discovery_prefix": "Discovery prefix", "birth_enable": "Enable birth message", "birth_topic": "Birth message topic", @@ -230,6 +241,10 @@ "name": "Publish", "description": "Publishes a message to an MQTT topic.", "fields": { + "evaluate_payload": { + "name": "Evaluate payload", + "description": "When `payload` is a Python bytes literal, evaluate the bytes literal and publish the raw data." + }, "topic": { "name": "Topic", "description": "Topic to publish to." 
diff --git a/homeassistant/components/mqtt/tag.py b/homeassistant/components/mqtt/tag.py index fbb0ea813c2..031c620af4a 100644 --- a/homeassistant/components/mqtt/tag.py +++ b/homeassistant/components/mqtt/tag.py @@ -13,6 +13,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE, CONF_VALUE_TEMPLATE from homeassistant.core import HassJobType, HomeAssistant, callback import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . import subscription @@ -31,7 +32,6 @@ from .models import ( MqttValueTemplate, MqttValueTemplateException, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_DEVICE_INFO_SCHEMA from .subscription import EntitySubscription @@ -118,8 +118,7 @@ class MQTTTagScanner(MqttDiscoveryDeviceUpdateMixin): self.hass = hass self._sub_state: dict[str, EntitySubscription] | None = None self._value_template = MqttValueTemplate( - config.get(CONF_VALUE_TEMPLATE), - hass=self.hass, + config.get(CONF_VALUE_TEMPLATE) ).async_render_with_possible_json_value MqttDiscoveryDeviceUpdateMixin.__init__( @@ -136,8 +135,7 @@ class MQTTTagScanner(MqttDiscoveryDeviceUpdateMixin): return self._config = config self._value_template = MqttValueTemplate( - config.get(CONF_VALUE_TEMPLATE), - hass=self.hass, + config.get(CONF_VALUE_TEMPLATE) ).async_render_with_possible_json_value update_device(self.hass, self._config_entry, config) await self.subscribe_topics() diff --git a/homeassistant/components/mqtt/text.py b/homeassistant/components/mqtt/text.py index 0b122dec7b5..0db711cc456 100644 --- a/homeassistant/components/mqtt/text.py +++ b/homeassistant/components/mqtt/text.py @@ -22,6 +22,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.typing import ConfigType, VolSchemaType from . 
import subscription @@ -33,7 +34,6 @@ from .models import ( MqttValueTemplate, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import check_state_too_long diff --git a/homeassistant/components/mqtt/trigger.py b/homeassistant/components/mqtt/trigger.py index 91ac404a07a..b901176cf88 100644 --- a/homeassistant/components/mqtt/trigger.py +++ b/homeassistant/components/mqtt/trigger.py @@ -18,6 +18,7 @@ from homeassistant.core import ( callback, ) from homeassistant.helpers import config_validation as cv +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.helpers.template import Template from homeassistant.helpers.trigger import TriggerActionType, TriggerData, TriggerInfo from homeassistant.helpers.typing import ConfigType, TemplateVarsType @@ -31,7 +32,6 @@ from .models import ( PayloadSentinel, PublishPayloadType, ReceiveMessage, - ReceivePayloadType, ) TRIGGER_SCHEMA = cv.TRIGGER_BASE_SCHEMA.extend( @@ -60,10 +60,10 @@ async def async_attach_trigger( trigger_data: TriggerData = trigger_info["trigger_data"] command_template: Callable[ [PublishPayloadType, TemplateVarsType], PublishPayloadType - ] = MqttCommandTemplate(config.get(CONF_PAYLOAD), hass=hass).async_render + ] = MqttCommandTemplate(config.get(CONF_PAYLOAD)).async_render value_template: Callable[[ReceivePayloadType, str], ReceivePayloadType] value_template = MqttValueTemplate( - config.get(CONF_VALUE_TEMPLATE), hass=hass + config.get(CONF_VALUE_TEMPLATE) ).async_render_with_possible_json_value encoding: str | None = config[CONF_ENCODING] or None qos: int = config[CONF_QOS] @@ -75,7 +75,6 @@ async def async_attach_trigger( wanted_payload = command_template(None, variables) topic_template: Template = config[CONF_TOPIC] - topic_template.hass = hass topic = topic_template.async_render(variables, limited=True, parse_result=False) mqtt.util.valid_subscribe_topic(topic) diff --git a/homeassistant/components/mqtt/vacuum.py b/homeassistant/components/mqtt/vacuum.py index c9898465184..87d6c9dd744 100644 --- a/homeassistant/components/mqtt/vacuum.py +++ b/homeassistant/components/mqtt/vacuum.py @@ -1,10 +1,5 @@ """Support for MQTT vacuums.""" -# The legacy schema for MQTT vacuum was deprecated with HA Core 2023.8.0 -# and was removed with HA Core 2024.2.0 -# The use of the schema attribute with MQTT vacuum was deprecated with HA Core 2024.2 -# the attribute will be remove with HA Core 2024.8 - from __future__ import annotations import logging @@ -38,15 +33,12 @@ from homeassistant.util.json import json_loads_object from . 
import subscription from .config import MQTT_BASE_SCHEMA -from .const import CONF_COMMAND_TOPIC, CONF_RETAIN, CONF_SCHEMA, CONF_STATE_TOPIC +from .const import CONF_COMMAND_TOPIC, CONF_RETAIN, CONF_STATE_TOPIC from .mixins import MqttEntity, async_setup_entity_entry_helper from .models import ReceiveMessage from .schemas import MQTT_ENTITY_COMMON_SCHEMA from .util import valid_publish_topic -LEGACY = "legacy" -STATE = "state" - BATTERY = "battery_level" FAN_SPEED = "fan_speed" STATE = "state" @@ -149,7 +141,7 @@ MQTT_VACUUM_ATTRIBUTES_BLOCKED = frozenset( MQTT_VACUUM_DOCS_URL = "https://www.home-assistant.io/integrations/vacuum.mqtt/" -VACUUM_BASE_SCHEMA = MQTT_BASE_SCHEMA.extend( +PLATFORM_SCHEMA_MODERN = MQTT_BASE_SCHEMA.extend( { vol.Optional(CONF_FAN_SPEED_LIST, default=[]): vol.All( cv.ensure_list, [cv.string] @@ -173,26 +165,10 @@ VACUUM_BASE_SCHEMA = MQTT_BASE_SCHEMA.extend( ), vol.Optional(CONF_COMMAND_TOPIC): valid_publish_topic, vol.Optional(CONF_RETAIN, default=DEFAULT_RETAIN): cv.boolean, - vol.Optional(CONF_SCHEMA): vol.All(vol.Lower, vol.Any(LEGACY, STATE)), } ).extend(MQTT_ENTITY_COMMON_SCHEMA.schema) -DISCOVERY_SCHEMA = vol.All( - VACUUM_BASE_SCHEMA.extend({}, extra=vol.ALLOW_EXTRA), - # Do not fail a config is the schema option is still present, - # De option was deprecated with HA Core 2024.2 and removed with HA Core 2024.8. - # As we allow extra options, and we will remove this check silently - # with HA Core 2025.8.0, we will only warn, - # if a adiscovery config still uses this option. - cv.removed(CONF_SCHEMA, raise_if_present=False), -) - -PLATFORM_SCHEMA_MODERN = vol.All( - VACUUM_BASE_SCHEMA, - # The schema options was removed with HA Core 2024.8, - # the cleanup is planned for HA Core 2025.8. - cv.removed(CONF_SCHEMA, raise_if_present=True), -) +DISCOVERY_SCHEMA = PLATFORM_SCHEMA_MODERN.extend({}, extra=vol.ALLOW_EXTRA) async def async_setup_entry( diff --git a/homeassistant/components/mysensors/__init__.py b/homeassistant/components/mysensors/__init__.py index ed18b890a24..8ebcbe0e2fe 100644 --- a/homeassistant/components/mysensors/__init__.py +++ b/homeassistant/components/mysensors/__init__.py @@ -10,7 +10,6 @@ from mysensors import BaseAsyncGateway from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceEntry from .const import ( @@ -32,9 +31,6 @@ _LOGGER = logging.getLogger(__name__) DATA_HASS_CONFIG = "hass_config" -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up an instance of the MySensors integration. 
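As a rough illustration of the mqtt/vacuum.py schema change a little further up: building DISCOVERY_SCHEMA directly from PLATFORM_SCHEMA_MODERN with extra=vol.ALLOW_EXTRA means a leftover legacy key such as `schema` is now simply passed through, where the removed cv.removed(CONF_SCHEMA, raise_if_present=False) wrapper previously logged a warning. The simplified schema below is an assumption for illustration; the real platform schema has many more keys.

import voluptuous as vol

# Simplified stand-in for PLATFORM_SCHEMA_MODERN in mqtt/vacuum.py (assumption).
PLATFORM_SCHEMA_MODERN = vol.Schema(
    {vol.Optional("name", default="MQTT Vacuum"): str}
)

# Mirrors DISCOVERY_SCHEMA = PLATFORM_SCHEMA_MODERN.extend({}, extra=vol.ALLOW_EXTRA).
DISCOVERY_SCHEMA = PLATFORM_SCHEMA_MODERN.extend({}, extra=vol.ALLOW_EXTRA)

# A discovery payload still carrying the removed "schema" option validates;
# the key is retained in the result but ignored by the platform.
config = DISCOVERY_SCHEMA({"name": "Robo", "schema": "state"})
assert config == {"name": "Robo", "schema": "state"}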
diff --git a/homeassistant/components/mysensors/binary_sensor.py b/homeassistant/components/mysensors/binary_sensor.py index a0a1c92c682..b8a3769308a 100644 --- a/homeassistant/components/mysensors/binary_sensor.py +++ b/homeassistant/components/mysensors/binary_sensor.py @@ -104,8 +104,8 @@ class MySensorsBinarySensor(mysensors.device.MySensorsChildEntity, BinarySensorE def __init__(self, *args: Any, **kwargs: Any) -> None: """Set up the instance.""" super().__init__(*args, **kwargs) - pres = self.gateway.const.Presentation - self.entity_description = SENSORS[pres(self.child_type).name] + presentation = self.gateway.const.Presentation + self.entity_description = SENSORS[presentation(self.child_type).name] @property def is_on(self) -> bool: diff --git a/homeassistant/components/mysensors/gateway.py b/homeassistant/components/mysensors/gateway.py index 11f27f8a108..00c8d5eecfb 100644 --- a/homeassistant/components/mysensors/gateway.py +++ b/homeassistant/components/mysensors/gateway.py @@ -16,7 +16,6 @@ import voluptuous as vol from homeassistant.components.mqtt import ( DOMAIN as MQTT_DOMAIN, ReceiveMessage as MQTTReceiveMessage, - ReceivePayloadType, async_publish, async_subscribe, ) @@ -24,6 +23,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_DEVICE, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant, callback import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.service_info.mqtt import ReceivePayloadType from homeassistant.setup import SetupPhases, async_pause_setup from homeassistant.util.unit_system import METRIC_SYSTEM diff --git a/homeassistant/components/mysensors/helpers.py b/homeassistant/components/mysensors/helpers.py index f060f3313dc..74dc99e76d3 100644 --- a/homeassistant/components/mysensors/helpers.py +++ b/homeassistant/components/mysensors/helpers.py @@ -168,11 +168,9 @@ def invalid_msg( gateway: BaseAsyncGateway, child: ChildSensor, value_type_name: ValueType ) -> str: """Return a message for an invalid child during schema validation.""" - pres = gateway.const.Presentation + presentation = gateway.const.Presentation set_req = gateway.const.SetReq - return ( - f"{pres(child.type).name} requires value_type {set_req[value_type_name].name}" - ) + return f"{presentation(child.type).name} requires value_type {set_req[value_type_name].name}" def validate_set_msg( @@ -202,10 +200,10 @@ def validate_child( ) -> defaultdict[Platform, list[DevId]]: """Validate a child. 
Returns a dict mapping hass platform names to list of DevId.""" validated: defaultdict[Platform, list[DevId]] = defaultdict(list) - pres: type[IntEnum] = gateway.const.Presentation + presentation: type[IntEnum] = gateway.const.Presentation set_req: type[IntEnum] = gateway.const.SetReq child_type_name: SensorType | None = next( - (member.name for member in pres if member.value == child.type), None + (member.name for member in presentation if member.value == child.type), None ) if not child_type_name: _LOGGER.warning("Child type %s is not supported", child.type) diff --git a/homeassistant/components/mysensors/sensor.py b/homeassistant/components/mysensors/sensor.py index a6a91c12a81..82e6833f664 100644 --- a/homeassistant/components/mysensors/sensor.py +++ b/homeassistant/components/mysensors/sensor.py @@ -28,6 +28,7 @@ from homeassistant.const import ( UnitOfLength, UnitOfMass, UnitOfPower, + UnitOfReactivePower, UnitOfSoundPressure, UnitOfTemperature, UnitOfVolume, @@ -195,7 +196,7 @@ SENSORS: dict[str, SensorEntityDescription] = { ), "V_VAR": SensorEntityDescription( key="V_VAR", - native_unit_of_measurement="var", + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, ), "V_VA": SensorEntityDescription( key="V_VA", @@ -318,9 +319,9 @@ class MySensorsSensor(mysensors.device.MySensorsChildEntity, SensorEntity): entity_description = SENSORS.get(set_req(self.value_type).name) if not entity_description: - pres = self.gateway.const.Presentation + presentation = self.gateway.const.Presentation entity_description = SENSORS.get( - f"{set_req(self.value_type).name}_{pres(self.child_type).name}" + f"{set_req(self.value_type).name}_{presentation(self.child_type).name}" ) return entity_description diff --git a/homeassistant/components/neato/icons.json b/homeassistant/components/neato/icons.json index ca50d5a9bc7..eb18a7e3196 100644 --- a/homeassistant/components/neato/icons.json +++ b/homeassistant/components/neato/icons.json @@ -1,5 +1,7 @@ { "services": { - "custom_cleaning": "mdi:broom" + "custom_cleaning": { + "service": "mdi:broom" + } } } diff --git a/homeassistant/components/ness_alarm/__init__.py b/homeassistant/components/ness_alarm/__init__.py index a8202434ce5..730a9aff765 100644 --- a/homeassistant/components/ness_alarm/__init__.py +++ b/homeassistant/components/ness_alarm/__init__.py @@ -44,7 +44,7 @@ DEFAULT_INFER_ARMING_STATE = False SIGNAL_ZONE_CHANGED = "ness_alarm.zone_changed" SIGNAL_ARMING_STATE_CHANGED = "ness_alarm.arming_state_changed" -ZoneChangedData = namedtuple("ZoneChangedData", ["zone_id", "state"]) +ZoneChangedData = namedtuple("ZoneChangedData", ["zone_id", "state"]) # noqa: PYI024 DEFAULT_ZONE_TYPE = BinarySensorDeviceClass.MOTION ZONE_SCHEMA = vol.Schema( diff --git a/homeassistant/components/ness_alarm/icons.json b/homeassistant/components/ness_alarm/icons.json index ea17fd2b299..29d8ae1c8f5 100644 --- a/homeassistant/components/ness_alarm/icons.json +++ b/homeassistant/components/ness_alarm/icons.json @@ -1,6 +1,10 @@ { "services": { - "aux": "mdi:audio-input-stereo-minijack", - "panic": "mdi:fire" + "aux": { + "service": "mdi:audio-input-stereo-minijack" + }, + "panic": { + "service": "mdi:fire" + } } } diff --git a/homeassistant/components/nest/__init__.py b/homeassistant/components/nest/__init__.py index bdec44a3c85..8a1719a9bd5 100644 --- a/homeassistant/components/nest/__init__.py +++ b/homeassistant/components/nest/__init__.py @@ -20,6 +20,7 @@ from google_nest_sdm.exceptions import ( DecodeException, SubscriberException, ) +from 
google_nest_sdm.traits import TraitType import voluptuous as vol from homeassistant.auth.permissions.const import POLICY_READ @@ -65,6 +66,8 @@ from .const import ( ) from .events import EVENT_NAME_MAP, NEST_EVENT from .media_source import ( + EVENT_MEDIA_API_URL_FORMAT, + EVENT_THUMBNAIL_URL_FORMAT, async_get_media_event_store, async_get_media_source_devices, async_get_transcoder, @@ -97,7 +100,7 @@ CONFIG_SCHEMA = vol.Schema( ) # Platforms for SDM API -PLATFORMS = [Platform.CAMERA, Platform.CLIMATE, Platform.SENSOR] +PLATFORMS = [Platform.CAMERA, Platform.CLIMATE, Platform.EVENT, Platform.SENSOR] # Fetch media events with a disk backed cache, with a limit for each camera # device. The largest media items are mp4 clips at ~120kb each, and we target @@ -136,11 +139,15 @@ class SignalUpdateCallback: """An EventCallback invoked when new events arrive from subscriber.""" def __init__( - self, hass: HomeAssistant, config_reload_cb: Callable[[], Awaitable[None]] + self, + hass: HomeAssistant, + config_reload_cb: Callable[[], Awaitable[None]], + config_entry_id: str, ) -> None: """Initialize EventCallback.""" self._hass = hass self._config_reload_cb = config_reload_cb + self._config_entry_id = config_entry_id async def async_handle_event(self, event_message: EventMessage) -> None: """Process an incoming EventMessage.""" @@ -159,19 +166,44 @@ class SignalUpdateCallback: ) if not device_entry: return + supported_traits = self._supported_traits(device_id) for api_event_type, image_event in events.items(): if not (event_type := EVENT_NAME_MAP.get(api_event_type)): continue + nest_event_id = image_event.event_token message = { "device_id": device_entry.id, "type": event_type, "timestamp": event_message.timestamp, - "nest_event_id": image_event.event_token, + "nest_event_id": nest_event_id, } + if ( + TraitType.CAMERA_EVENT_IMAGE in supported_traits + or TraitType.CAMERA_CLIP_PREVIEW in supported_traits + ): + attachment = { + "image": EVENT_THUMBNAIL_URL_FORMAT.format( + device_id=device_entry.id, event_token=image_event.event_token + ) + } + if TraitType.CAMERA_CLIP_PREVIEW in supported_traits: + attachment["video"] = EVENT_MEDIA_API_URL_FORMAT.format( + device_id=device_entry.id, event_token=image_event.event_token + ) + message["attachment"] = attachment if image_event.zones: message["zones"] = image_event.zones self._hass.bus.async_fire(NEST_EVENT, message) + def _supported_traits(self, device_id: str) -> list[TraitType]: + if not ( + device_manager := self._hass.data[DOMAIN] + .get(self._config_entry_id, {}) + .get(DATA_DEVICE_MANAGER) + ) or not (device := device_manager.devices.get(device_id)): + return [] + return list(device.traits) + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Nest from a config entry with dispatch between old/new flows.""" @@ -197,7 +229,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def async_config_reload() -> None: await hass.config_entries.async_reload(entry.entry_id) - update_callback = SignalUpdateCallback(hass, async_config_reload) + update_callback = SignalUpdateCallback(hass, async_config_reload, entry.entry_id) subscriber.set_update_callback(update_callback.async_handle_event) try: await subscriber.start_async() diff --git a/homeassistant/components/nest/event.py b/homeassistant/components/nest/event.py new file mode 100644 index 00000000000..a6d70fe86d5 --- /dev/null +++ b/homeassistant/components/nest/event.py @@ -0,0 +1,129 @@ +"""Event platform for Google Nest.""" + +from 
dataclasses import dataclass +import logging + +from google_nest_sdm.device import Device +from google_nest_sdm.device_manager import DeviceManager +from google_nest_sdm.event import EventMessage, EventType +from google_nest_sdm.traits import TraitType + +from homeassistant.components.event import ( + EventDeviceClass, + EventEntity, + EventEntityDescription, +) +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from .const import DATA_DEVICE_MANAGER, DOMAIN +from .device_info import NestDeviceInfo +from .events import ( + EVENT_CAMERA_MOTION, + EVENT_CAMERA_PERSON, + EVENT_CAMERA_SOUND, + EVENT_DOORBELL_CHIME, + EVENT_NAME_MAP, +) + +_LOGGER = logging.getLogger(__name__) + + +@dataclass(kw_only=True, frozen=True) +class NestEventEntityDescription(EventEntityDescription): + """Entity description for nest event entities.""" + + trait_types: list[TraitType] + api_event_types: list[EventType] + event_types: list[str] + + +ENTITY_DESCRIPTIONS = [ + NestEventEntityDescription( + key=EVENT_DOORBELL_CHIME, + translation_key="chime", + device_class=EventDeviceClass.DOORBELL, + event_types=[EVENT_DOORBELL_CHIME], + trait_types=[TraitType.DOORBELL_CHIME], + api_event_types=[EventType.DOORBELL_CHIME], + ), + NestEventEntityDescription( + key=EVENT_CAMERA_MOTION, + translation_key="motion", + device_class=EventDeviceClass.MOTION, + event_types=[EVENT_CAMERA_MOTION, EVENT_CAMERA_PERSON, EVENT_CAMERA_SOUND], + trait_types=[ + TraitType.CAMERA_MOTION, + TraitType.CAMERA_PERSON, + TraitType.CAMERA_SOUND, + ], + api_event_types=[ + EventType.CAMERA_MOTION, + EventType.CAMERA_PERSON, + EventType.CAMERA_SOUND, + ], + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, entry: ConfigEntry, async_add_entities: AddEntitiesCallback +) -> None: + """Set up the sensors.""" + + device_manager: DeviceManager = hass.data[DOMAIN][entry.entry_id][ + DATA_DEVICE_MANAGER + ] + async_add_entities( + NestTraitEventEntity(desc, device) + for device in device_manager.devices.values() + for desc in ENTITY_DESCRIPTIONS + if any(trait in device.traits for trait in desc.trait_types) + ) + + +class NestTraitEventEntity(EventEntity): + """Nest doorbell event entity.""" + + entity_description: NestEventEntityDescription + _attr_has_entity_name = True + + def __init__( + self, entity_description: NestEventEntityDescription, device: Device + ) -> None: + """Initialize the event entity.""" + self.entity_description = entity_description + self._device = device + self._attr_unique_id = f"{device.name}-{entity_description.key}" + self._attr_device_info = NestDeviceInfo(device).device_info + + async def _async_handle_event(self, event_message: EventMessage) -> None: + """Handle a device event.""" + if ( + event_message.relation_update + or not event_message.resource_update_name + or not (events := event_message.resource_update_events) + ): + return + last_nest_event_id = self.state_attributes.get("nest_event_id") + for api_event_type, nest_event in events.items(): + if api_event_type not in self.entity_description.api_event_types: + continue + + event_type = EVENT_NAME_MAP[api_event_type] + nest_event_id = nest_event.event_token + if last_nest_event_id is not None and last_nest_event_id == nest_event_id: + # This event is a duplicate message in the same thread + return + + self._trigger_event( + event_type, + {"nest_event_id": nest_event_id}, + ) + self.async_write_ha_state() + return + + async def 
async_added_to_hass(self) -> None: + """Run when entity is added to attach an event listener.""" + self.async_on_remove(self._device.add_event_callback(self._async_handle_event)) diff --git a/homeassistant/components/nest/manifest.json b/homeassistant/components/nest/manifest.json index 3472fa64e8f..1b0697f7602 100644 --- a/homeassistant/components/nest/manifest.json +++ b/homeassistant/components/nest/manifest.json @@ -20,5 +20,5 @@ "iot_class": "cloud_push", "loggers": ["google_nest_sdm"], "quality_scale": "platinum", - "requirements": ["google-nest-sdm==4.0.7"] + "requirements": ["google-nest-sdm==5.0.0"] } diff --git a/homeassistant/components/nest/strings.json b/homeassistant/components/nest/strings.json index 35e1cc68165..cd915acfbe5 100644 --- a/homeassistant/components/nest/strings.json +++ b/homeassistant/components/nest/strings.json @@ -72,5 +72,31 @@ "title": "Legacy Works With Nest has been removed", "description": "Legacy Works With Nest has been removed from Home Assistant, and the API shuts down as of September 2023.\n\nYou must take action to use the SDM API. Remove all `nest` configuration from `configuration.yaml` and restart Home Assistant, then see the Nest [integration instructions]({documentation_url}) for set up instructions and supported devices." } + }, + "entity": { + "event": { + "chime": { + "name": "Chime", + "state_attributes": { + "event_type": { + "state": { + "doorbell_chime": "[%key:component::nest::entity::event::chime::name%]" + } + } + } + }, + "motion": { + "name": "[%key:component::event::entity_component::motion::name%]", + "state_attributes": { + "event_type": { + "state": { + "camera_motion": "[%key:component::event::entity_component::motion::name%]", + "camera_person": "Person", + "camera_sound": "Sound" + } + } + } + } + } } } diff --git a/homeassistant/components/netatmo/climate.py b/homeassistant/components/netatmo/climate.py index e257c7a89ea..c2953b9d49d 100644 --- a/homeassistant/components/netatmo/climate.py +++ b/homeassistant/components/netatmo/climate.py @@ -174,7 +174,7 @@ async def async_setup_entry( ) platform.async_register_entity_service( SERVICE_CLEAR_TEMPERATURE_SETTING, - {}, + None, "_async_service_clear_temperature_setting", ) diff --git a/homeassistant/components/netatmo/icons.json b/homeassistant/components/netatmo/icons.json index 31b1740ab21..70a51542126 100644 --- a/homeassistant/components/netatmo/icons.json +++ b/homeassistant/components/netatmo/icons.json @@ -34,15 +34,35 @@ } }, "services": { - "set_camera_light": "mdi:led-on", - "set_schedule": "mdi:calendar-clock", - "set_preset_mode_with_end_datetime": "mdi:calendar-clock", - "set_temperature_with_end_datetime": "mdi:thermometer", - "set_temperature_with_time_period": "mdi:thermometer", - "clear_temperature_setting": "mdi:thermometer", - "set_persons_home": "mdi:home", - "set_person_away": "mdi:walk", - "register_webhook": "mdi:link-variant", - "unregister_webhook": "mdi:link-variant-off" + "set_camera_light": { + "service": "mdi:led-on" + }, + "set_schedule": { + "service": "mdi:calendar-clock" + }, + "set_preset_mode_with_end_datetime": { + "service": "mdi:calendar-clock" + }, + "set_temperature_with_end_datetime": { + "service": "mdi:thermometer" + }, + "set_temperature_with_time_period": { + "service": "mdi:thermometer" + }, + "clear_temperature_setting": { + "service": "mdi:thermometer" + }, + "set_persons_home": { + "service": "mdi:home" + }, + "set_person_away": { + "service": "mdi:walk" + }, + "register_webhook": { + "service": "mdi:link-variant" + }, + 
"unregister_webhook": { + "service": "mdi:link-variant-off" + } } } diff --git a/homeassistant/components/netatmo/manifest.json b/homeassistant/components/netatmo/manifest.json index 98734bcb742..0a32777b527 100644 --- a/homeassistant/components/netatmo/manifest.json +++ b/homeassistant/components/netatmo/manifest.json @@ -12,5 +12,5 @@ "integration_type": "hub", "iot_class": "cloud_polling", "loggers": ["pyatmo"], - "requirements": ["pyatmo==8.0.3"] + "requirements": ["pyatmo==8.1.0"] } diff --git a/homeassistant/components/netatmo/select.py b/homeassistant/components/netatmo/select.py index 3fe098a75a9..92568b73e80 100644 --- a/homeassistant/components/netatmo/select.py +++ b/homeassistant/components/netatmo/select.py @@ -72,7 +72,7 @@ class NetatmoScheduleSelect(NetatmoBaseEntity, SelectEntity): self._attr_current_option = getattr(self.home.get_selected_schedule(), "name") self._attr_options = [ - schedule.name for schedule in self.home.schedules.values() + schedule.name for schedule in self.home.schedules.values() if schedule.name ] async def async_added_to_hass(self) -> None: @@ -128,5 +128,5 @@ class NetatmoScheduleSelect(NetatmoBaseEntity, SelectEntity): self.home.schedules ) self._attr_options = [ - schedule.name for schedule in self.home.schedules.values() + schedule.name for schedule in self.home.schedules.values() if schedule.name ] diff --git a/homeassistant/components/netgear/config_flow.py b/homeassistant/components/netgear/config_flow.py index a872e9fb4ac..55112c6662c 100644 --- a/homeassistant/components/netgear/config_flow.py +++ b/homeassistant/components/netgear/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import logging -from typing import cast +from typing import Any, cast from urllib.parse import urlparse from pynetgear import DEFAULT_HOST, DEFAULT_PORT, DEFAULT_USER @@ -175,7 +175,9 @@ class NetgearFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_user() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} diff --git a/homeassistant/components/netgear_lte/icons.json b/homeassistant/components/netgear_lte/icons.json index 543d9bf4690..703d330512b 100644 --- a/homeassistant/components/netgear_lte/icons.json +++ b/homeassistant/components/netgear_lte/icons.json @@ -31,9 +31,17 @@ } }, "services": { - "delete_sms": "mdi:delete", - "set_option": "mdi:cog", - "connect_lte": "mdi:wifi", - "disconnect_lte": "mdi:wifi-off" + "delete_sms": { + "service": "mdi:delete" + }, + "set_option": { + "service": "mdi:cog" + }, + "connect_lte": { + "service": "mdi:wifi" + }, + "disconnect_lte": { + "service": "mdi:wifi-off" + } } } diff --git a/homeassistant/components/netio/switch.py b/homeassistant/components/netio/switch.py index f5627f5e56b..54bfef5e1da 100644 --- a/homeassistant/components/netio/switch.py +++ b/homeassistant/components/netio/switch.py @@ -38,7 +38,7 @@ CONF_OUTLETS = "outlets" DEFAULT_PORT = 1234 DEFAULT_USERNAME = "admin" -Device = namedtuple("Device", ["netio", "entities"]) +Device = namedtuple("Device", ["netio", "entities"]) # noqa: PYI024 DEVICES: dict[str, Device] = {} MIN_TIME_BETWEEN_SCANS = timedelta(seconds=10) diff --git a/homeassistant/components/nexia/__init__.py b/homeassistant/components/nexia/__init__.py index 4d0993d3569..9bc76fdcfdc 100644 --- a/homeassistant/components/nexia/__init__.py +++ b/homeassistant/components/nexia/__init__.py @@ -12,7 +12,6 @@ 
from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import device_registry as dr from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv from .const import CONF_BRAND, DOMAIN, PLATFORMS from .coordinator import NexiaDataUpdateCoordinator @@ -21,8 +20,6 @@ from .util import is_invalid_auth_code _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: NexiaConfigEntry) -> bool: """Configure the base Nexia device for Home Assistant.""" diff --git a/homeassistant/components/nexia/config_flow.py b/homeassistant/components/nexia/config_flow.py index 6d1f4af043b..592ebde61c3 100644 --- a/homeassistant/components/nexia/config_flow.py +++ b/homeassistant/components/nexia/config_flow.py @@ -1,13 +1,14 @@ """Config flow for Nexia integration.""" import logging +from typing import Any import aiohttp from nexia.const import BRAND_ASAIR, BRAND_NEXIA, BRAND_TRANE from nexia.home import NexiaHome import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -81,7 +82,9 @@ class NexiaConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/nexia/icons.json b/homeassistant/components/nexia/icons.json index 620d1a42c03..a2157f5c035 100644 --- a/homeassistant/components/nexia/icons.json +++ b/homeassistant/components/nexia/icons.json @@ -20,8 +20,14 @@ } }, "services": { - "set_aircleaner_mode": "mdi:air-filter", - "set_humidify_setpoint": "mdi:water-percent", - "set_hvac_run_mode": "mdi:hvac" + "set_aircleaner_mode": { + "service": "mdi:air-filter" + }, + "set_humidify_setpoint": { + "service": "mdi:water-percent" + }, + "set_hvac_run_mode": { + "service": "mdi:hvac" + } } } diff --git a/homeassistant/components/nextbus/config_flow.py b/homeassistant/components/nextbus/config_flow.py index 05290733bd9..90a6a4fc912 100644 --- a/homeassistant/components/nextbus/config_flow.py +++ b/homeassistant/components/nextbus/config_flow.py @@ -79,7 +79,7 @@ class NextBusFlowHandler(ConfigFlow, domain=DOMAIN): _route_tags: dict[str, str] _stop_tags: dict[str, str] - def __init__(self): + def __init__(self) -> None: """Initialize NextBus config flow.""" self.data: dict[str, str] = {} self._client = NextBusClient() diff --git a/homeassistant/components/nextcloud/__init__.py b/homeassistant/components/nextcloud/__init__.py index 9e328e8e58d..a487a3f1414 100644 --- a/homeassistant/components/nextcloud/__init__.py +++ b/homeassistant/components/nextcloud/__init__.py @@ -19,14 +19,12 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers import entity_registry as er -from .const import DOMAIN from .coordinator import NextcloudDataUpdateCoordinator PLATFORMS = 
(Platform.SENSOR, Platform.BINARY_SENSOR, Platform.UPDATE) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/nextcloud/update.py b/homeassistant/components/nextcloud/update.py index 8c292e1bba2..5b9de52ad1d 100644 --- a/homeassistant/components/nextcloud/update.py +++ b/homeassistant/components/nextcloud/update.py @@ -32,12 +32,12 @@ class NextcloudUpdateSensor(NextcloudEntity, UpdateEntity): """Represents a Nextcloud update entity.""" @property - def installed_version(self) -> str | None: + def installed_version(self) -> str: """Version installed and in use.""" - return self.coordinator.data.get("system_version") + return self.coordinator.data["system_version"] @property - def latest_version(self) -> str | None: + def latest_version(self) -> str: """Latest version available for install.""" return self.coordinator.data.get( "update_available_version", self.installed_version @@ -46,7 +46,5 @@ class NextcloudUpdateSensor(NextcloudEntity, UpdateEntity): @property def release_url(self) -> str | None: """URL to the full release notes of the latest version available.""" - if self.latest_version: - ver = "-".join(self.latest_version.split(".")[:3]) - return f"https://nextcloud.com/changelog/#{ver}" - return None + ver = "-".join(self.latest_version.split(".")[:3]) + return f"https://nextcloud.com/changelog/#{ver}" diff --git a/homeassistant/components/nextdns/manifest.json b/homeassistant/components/nextdns/manifest.json index b65706ef1ce..be9eee5049c 100644 --- a/homeassistant/components/nextdns/manifest.json +++ b/homeassistant/components/nextdns/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["nextdns"], "quality_scale": "platinum", - "requirements": ["nextdns==3.1.0"] + "requirements": ["nextdns==3.2.0"] } diff --git a/homeassistant/components/nfandroidtv/__init__.py b/homeassistant/components/nfandroidtv/__init__.py index 42d42e26d1f..ae7a4e615d4 100644 --- a/homeassistant/components/nfandroidtv/__init__.py +++ b/homeassistant/components/nfandroidtv/__init__.py @@ -14,7 +14,7 @@ from .const import DATA_HASS_CONFIG, DOMAIN PLATFORMS = [Platform.NOTIFY] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: diff --git a/homeassistant/components/nice_go/__init__.py b/homeassistant/components/nice_go/__init__.py new file mode 100644 index 00000000000..ab3dc06e3c1 --- /dev/null +++ b/homeassistant/components/nice_go/__init__.py @@ -0,0 +1,48 @@ +"""The Nice G.O. integration.""" + +from __future__ import annotations + +import logging + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from .coordinator import NiceGOUpdateCoordinator + +_LOGGER = logging.getLogger(__name__) +PLATFORMS: list[Platform] = [ + Platform.COVER, + Platform.EVENT, + Platform.LIGHT, + Platform.SWITCH, +] + +type NiceGOConfigEntry = ConfigEntry[NiceGOUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: NiceGOConfigEntry) -> bool: + """Set up Nice G.O. 
from a config entry.""" + + coordinator = NiceGOUpdateCoordinator(hass) + + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + entry.async_create_background_task( + hass, + coordinator.client_listen(), + "nice_go_websocket_task", + ) + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: NiceGOConfigEntry) -> bool: + """Unload a config entry.""" + if unload_ok := await hass.config_entries.async_unload_platforms(entry, PLATFORMS): + await entry.runtime_data.api.close() + + return unload_ok diff --git a/homeassistant/components/nice_go/config_flow.py b/homeassistant/components/nice_go/config_flow.py new file mode 100644 index 00000000000..9d2c1c05518 --- /dev/null +++ b/homeassistant/components/nice_go/config_flow.py @@ -0,0 +1,68 @@ +"""Config flow for Nice G.O. integration.""" + +from __future__ import annotations + +from datetime import datetime +import logging +from typing import Any + +from nice_go import AuthFailedError, NiceGOApi +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.helpers.aiohttp_client import async_get_clientsession + +from .const import CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_EMAIL): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class NiceGOConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Nice G.O.""" + + VERSION = 1 + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + if user_input is not None: + await self.async_set_unique_id(user_input[CONF_EMAIL]) + self._abort_if_unique_id_configured() + + hub = NiceGOApi() + + try: + refresh_token = await hub.authenticate( + user_input[CONF_EMAIL], + user_input[CONF_PASSWORD], + async_get_clientsession(self.hass), + ) + except AuthFailedError: + errors["base"] = "invalid_auth" + except Exception: # noqa: BLE001 + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + return self.async_create_entry( + title=user_input[CONF_EMAIL], + data={ + CONF_EMAIL: user_input[CONF_EMAIL], + CONF_PASSWORD: user_input[CONF_PASSWORD], + CONF_REFRESH_TOKEN: refresh_token, + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), + }, + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/nice_go/const.py b/homeassistant/components/nice_go/const.py new file mode 100644 index 00000000000..c3caa92c8be --- /dev/null +++ b/homeassistant/components/nice_go/const.py @@ -0,0 +1,13 @@ +"""Constants for the Nice G.O. 
integration.""" + +from datetime import timedelta + +DOMAIN = "nice_go" + +# Configuration +CONF_SITE_ID = "site_id" +CONF_DEVICE_ID = "device_id" +CONF_REFRESH_TOKEN = "refresh_token" +CONF_REFRESH_TOKEN_CREATION_TIME = "refresh_token_creation_time" + +REFRESH_TOKEN_EXPIRY_TIME = timedelta(days=30) diff --git a/homeassistant/components/nice_go/coordinator.py b/homeassistant/components/nice_go/coordinator.py new file mode 100644 index 00000000000..323e0a08fe8 --- /dev/null +++ b/homeassistant/components/nice_go/coordinator.py @@ -0,0 +1,223 @@ +"""DataUpdateCoordinator for Nice G.O.""" + +from __future__ import annotations + +import asyncio +from dataclasses import dataclass +from datetime import datetime +import json +import logging +from typing import Any + +from nice_go import ( + BARRIER_STATUS, + ApiError, + AuthFailedError, + BarrierState, + ConnectionState, + NiceGOApi, +) + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers import issue_registry as ir +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import ( + CONF_REFRESH_TOKEN, + CONF_REFRESH_TOKEN_CREATION_TIME, + DOMAIN, + REFRESH_TOKEN_EXPIRY_TIME, +) + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class NiceGODevice: + """Nice G.O. device dataclass.""" + + id: str + name: str + barrier_status: str + light_status: bool + fw_version: str + connected: bool + vacation_mode: bool + + +class NiceGOUpdateCoordinator(DataUpdateCoordinator[dict[str, NiceGODevice]]): + """DataUpdateCoordinator for Nice G.O.""" + + config_entry: ConfigEntry + organization_id: str + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize DataUpdateCoordinator for Nice G.O.""" + super().__init__( + hass, + _LOGGER, + name="Nice G.O.", + ) + + self.refresh_token = self.config_entry.data[CONF_REFRESH_TOKEN] + self.refresh_token_creation_time = self.config_entry.data[ + CONF_REFRESH_TOKEN_CREATION_TIME + ] + self.email = self.config_entry.data[CONF_EMAIL] + self.password = self.config_entry.data[CONF_PASSWORD] + self.api = NiceGOApi() + self.ws_connected = False + + async def _parse_barrier(self, barrier_state: BarrierState) -> NiceGODevice | None: + """Parse barrier data.""" + + device_id = barrier_state.deviceId + name = barrier_state.reported["displayName"] + if barrier_state.reported["migrationStatus"] == "NOT_STARTED": + ir.async_create_issue( + self.hass, + DOMAIN, + f"firmware_update_required_{device_id}", + is_fixable=False, + severity=ir.IssueSeverity.ERROR, + translation_key="firmware_update_required", + translation_placeholders={"device_name": name}, + ) + return None + ir.async_delete_issue( + self.hass, DOMAIN, f"firmware_update_required_{device_id}" + ) + barrier_status_raw = [ + int(x) for x in barrier_state.reported["barrierStatus"].split(",") + ] + + if BARRIER_STATUS[int(barrier_status_raw[2])] == "STATIONARY": + barrier_status = "open" if barrier_status_raw[0] == 1 else "closed" + else: + barrier_status = BARRIER_STATUS[int(barrier_status_raw[2])].lower() + + light_status = barrier_state.reported["lightStatus"].split(",")[0] == "1" + fw_version = barrier_state.reported["deviceFwVersion"] + if barrier_state.connectionState: + connected = barrier_state.connectionState.connected + else: + connected = 
False + vacation_mode = barrier_state.reported["vcnMode"] + + return NiceGODevice( + id=device_id, + name=name, + barrier_status=barrier_status, + light_status=light_status, + fw_version=fw_version, + connected=connected, + vacation_mode=vacation_mode, + ) + + async def _async_update_data(self) -> dict[str, NiceGODevice]: + return self.data + + async def _async_setup(self) -> None: + """Set up the coordinator.""" + async with asyncio.timeout(10): + expiry_time = ( + self.refresh_token_creation_time + + REFRESH_TOKEN_EXPIRY_TIME.total_seconds() + ) + try: + if datetime.now().timestamp() >= expiry_time: + await self._update_refresh_token() + else: + await self.api.authenticate_refresh( + self.refresh_token, async_get_clientsession(self.hass) + ) + _LOGGER.debug("Authenticated with Nice G.O. API") + + barriers = await self.api.get_all_barriers() + parsed_barriers = [ + await self._parse_barrier(barrier.state) for barrier in barriers + ] + + # Parse the barriers and save them in a dictionary + devices = { + barrier.id: barrier for barrier in parsed_barriers if barrier + } + self.organization_id = await barriers[0].get_attr("organization") + except AuthFailedError as e: + raise ConfigEntryAuthFailed from e + except ApiError as e: + raise UpdateFailed from e + else: + self.async_set_updated_data(devices) + + async def _update_refresh_token(self) -> None: + """Update the refresh token with Nice G.O. API.""" + _LOGGER.debug("Updating the refresh token with Nice G.O. API") + try: + refresh_token = await self.api.authenticate( + self.email, self.password, async_get_clientsession(self.hass) + ) + except AuthFailedError as e: + _LOGGER.exception("Authentication failed") + raise ConfigEntryAuthFailed from e + except ApiError as e: + _LOGGER.exception("API error") + raise UpdateFailed from e + + self.refresh_token = refresh_token + data = { + **self.config_entry.data, + CONF_REFRESH_TOKEN: refresh_token, + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), + } + self.hass.config_entries.async_update_entry(self.config_entry, data=data) + + async def client_listen(self) -> None: + """Listen to the websocket for updates.""" + self.api.event(self.on_connected) + self.api.event(self.on_data) + try: + await self.api.connect(reconnect=True) + except ApiError: + _LOGGER.exception("API error") + + if not self.hass.is_stopping: + await asyncio.sleep(5) + await self.client_listen() + + async def on_data(self, data: dict[str, Any]) -> None: + """Handle incoming data from the websocket.""" + _LOGGER.debug("Received data from the websocket") + _LOGGER.debug(data) + raw_data = data["data"]["devicesStatesUpdateFeed"]["item"] + parsed_data = await self._parse_barrier( + BarrierState( + deviceId=raw_data["deviceId"], + desired=json.loads(raw_data["desired"]), + reported=json.loads(raw_data["reported"]), + connectionState=ConnectionState( + connected=raw_data["connectionState"]["connected"], + updatedTimestamp=raw_data["connectionState"]["updatedTimestamp"], + ) + if raw_data["connectionState"] + else None, + version=raw_data["version"], + timestamp=raw_data["timestamp"], + ) + ) + if parsed_data is None: + return + + data_copy = self.data.copy() + data_copy[parsed_data.id] = parsed_data + + self.async_set_updated_data(data_copy) + + async def on_connected(self) -> None: + """Handle the websocket connection.""" + _LOGGER.debug("Connected to the websocket") + await self.api.subscribe(self.organization_id) diff --git a/homeassistant/components/nice_go/cover.py b/homeassistant/components/nice_go/cover.py new file 
mode 100644 index 00000000000..4098d9ef426 --- /dev/null +++ b/homeassistant/components/nice_go/cover.py @@ -0,0 +1,72 @@ +"""Cover entity for Nice G.O.""" + +from typing import Any + +from homeassistant.components.cover import ( + CoverDeviceClass, + CoverEntity, + CoverEntityFeature, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import NiceGOConfigEntry +from .entity import NiceGOEntity + +PARALLEL_UPDATES = 1 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: NiceGOConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Nice G.O. cover.""" + coordinator = config_entry.runtime_data + + async_add_entities( + NiceGOCoverEntity(coordinator, device_id, device_data.name) + for device_id, device_data in coordinator.data.items() + ) + + +class NiceGOCoverEntity(NiceGOEntity, CoverEntity): + """Representation of a Nice G.O. cover.""" + + _attr_supported_features = CoverEntityFeature.OPEN | CoverEntityFeature.CLOSE + _attr_name = None + _attr_device_class = CoverDeviceClass.GARAGE + + @property + def is_closed(self) -> bool: + """Return if cover is closed.""" + return self.data.barrier_status == "closed" + + @property + def is_opened(self) -> bool: + """Return if cover is open.""" + return self.data.barrier_status == "open" + + @property + def is_opening(self) -> bool: + """Return if cover is opening.""" + return self.data.barrier_status == "opening" + + @property + def is_closing(self) -> bool: + """Return if cover is closing.""" + return self.data.barrier_status == "closing" + + async def async_close_cover(self, **kwargs: Any) -> None: + """Close the garage door.""" + if self.is_closed: + return + + await self.coordinator.api.close_barrier(self._device_id) + + async def async_open_cover(self, **kwargs: Any) -> None: + """Open the garage door.""" + if self.is_opened: + return + + await self.coordinator.api.open_barrier(self._device_id) diff --git a/homeassistant/components/nice_go/diagnostics.py b/homeassistant/components/nice_go/diagnostics.py new file mode 100644 index 00000000000..2c9a695d4b5 --- /dev/null +++ b/homeassistant/components/nice_go/diagnostics.py @@ -0,0 +1,30 @@ +"""Diagnostics support for Nice G.O..""" + +from __future__ import annotations + +from dataclasses import asdict +from typing import Any + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant + +from . 
import NiceGOConfigEntry
+from .const import CONF_REFRESH_TOKEN
+
+TO_REDACT = {CONF_PASSWORD, CONF_EMAIL, CONF_REFRESH_TOKEN, "title", "unique_id"}
+
+
+async def async_get_config_entry_diagnostics(
+    hass: HomeAssistant, entry: NiceGOConfigEntry
+) -> dict[str, Any]:
+    """Return diagnostics for a config entry."""
+    coordinator = entry.runtime_data
+
+    return {
+        "entry": async_redact_data(entry.as_dict(), TO_REDACT),
+        "coordinator_data": {
+            device_id: asdict(device_data)
+            for device_id, device_data in coordinator.data.items()
+        },
+    }
diff --git a/homeassistant/components/nice_go/entity.py b/homeassistant/components/nice_go/entity.py
new file mode 100644
index 00000000000..266ad72add3
--- /dev/null
+++ b/homeassistant/components/nice_go/entity.py
@@ -0,0 +1,40 @@
+"""Base entity for Nice G.O."""
+
+from homeassistant.helpers.device_registry import DeviceInfo
+from homeassistant.helpers.update_coordinator import CoordinatorEntity
+
+from .const import DOMAIN
+from .coordinator import NiceGODevice, NiceGOUpdateCoordinator
+
+
+class NiceGOEntity(CoordinatorEntity[NiceGOUpdateCoordinator]):
+    """Common base for Nice G.O. entities."""
+
+    _attr_has_entity_name = True
+
+    def __init__(
+        self,
+        coordinator: NiceGOUpdateCoordinator,
+        device_id: str,
+        device_name: str,
+    ) -> None:
+        """Initialize the entity."""
+        super().__init__(coordinator)
+
+        self._attr_unique_id = device_id
+        self._device_id = device_id
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, device_id)},
+            name=device_name,
+            sw_version=coordinator.data[device_id].fw_version,
+        )
+
+    @property
+    def data(self) -> NiceGODevice:
+        """Return the Nice G.O. device."""
+        return self.coordinator.data[self._device_id]
+
+    @property
+    def available(self) -> bool:
+        """Return if entity is available."""
+        return super().available and self.data.connected
diff --git a/homeassistant/components/nice_go/event.py b/homeassistant/components/nice_go/event.py
new file mode 100644
index 00000000000..a19511b0b11
--- /dev/null
+++ b/homeassistant/components/nice_go/event.py
@@ -0,0 +1,51 @@
+"""Nice G.O. event platform."""
+
+import logging
+from typing import Any
+
+from homeassistant.components.event import EventEntity
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import NiceGOConfigEntry
+from .entity import NiceGOEntity
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: NiceGOConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up Nice G.O. event."""
+
+    coordinator = config_entry.runtime_data
+
+    async_add_entities(
+        NiceGOEventEntity(coordinator, device_id, device_data.name)
+        for device_id, device_data in coordinator.data.items()
+    )
+
+
+EVENT_BARRIER_OBSTRUCTED = "barrier_obstructed"
+
+
+class NiceGOEventEntity(NiceGOEntity, EventEntity):
+    """Event for Nice G.O. devices."""
+
+    _attr_translation_key = "barrier_obstructed"
+    _attr_event_types = [EVENT_BARRIER_OBSTRUCTED]
+
+    async def async_added_to_hass(self) -> None:
+        """Listen for events."""
+        await super().async_added_to_hass()
+        self.coordinator.api.event(self.on_barrier_obstructed)
+
+    async def on_barrier_obstructed(self, data: dict[str, Any]) -> None:
+        """Handle barrier obstructed event."""
+        _LOGGER.debug("Barrier obstructed event: %s", data)
+        if data["deviceId"] == self.data.id:
+            _LOGGER.debug("Barrier obstructed event for %s, triggering", self.data.name)
+            self._trigger_event(EVENT_BARRIER_OBSTRUCTED)
+            self.async_write_ha_state()
diff --git a/homeassistant/components/nice_go/icons.json b/homeassistant/components/nice_go/icons.json
new file mode 100644
index 00000000000..61e36c9eb3a
--- /dev/null
+++ b/homeassistant/components/nice_go/icons.json
@@ -0,0 +1,14 @@
+{
+  "entity": {
+    "switch": {
+      "vacation_mode": {
+        "default": "mdi:beach"
+      }
+    },
+    "event": {
+      "barrier_obstructed": {
+        "default": "mdi:garage-alert"
+      }
+    }
+  }
+}
diff --git a/homeassistant/components/nice_go/light.py b/homeassistant/components/nice_go/light.py
new file mode 100644
index 00000000000..4a08364688e
--- /dev/null
+++ b/homeassistant/components/nice_go/light.py
@@ -0,0 +1,48 @@
+"""Nice G.O. light."""
+
+from typing import Any
+
+from homeassistant.components.light import ColorMode, LightEntity
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import NiceGOConfigEntry
+from .entity import NiceGOEntity
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: NiceGOConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up Nice G.O. light."""
+
+    coordinator = config_entry.runtime_data
+
+    async_add_entities(
+        NiceGOLightEntity(coordinator, device_id, device_data.name)
+        for device_id, device_data in coordinator.data.items()
+    )
+
+
+class NiceGOLightEntity(NiceGOEntity, LightEntity):
+    """Light for Nice G.O. devices."""
+
+    _attr_color_mode = ColorMode.ONOFF
+    _attr_supported_color_modes = {ColorMode.ONOFF}
+    _attr_translation_key = "light"
+
+    @property
+    def is_on(self) -> bool:
+        """Return if the light is on or not."""
+        return self.data.light_status
+
+    async def async_turn_on(self, **kwargs: Any) -> None:
+        """Turn on the light."""
+
+        await self.coordinator.api.light_on(self._device_id)
+
+    async def async_turn_off(self, **kwargs: Any) -> None:
+        """Turn off the light."""
+
+        await self.coordinator.api.light_off(self._device_id)
diff --git a/homeassistant/components/nice_go/manifest.json b/homeassistant/components/nice_go/manifest.json
new file mode 100644
index 00000000000..884f2eb7b18
--- /dev/null
+++ b/homeassistant/components/nice_go/manifest.json
@@ -0,0 +1,10 @@
+{
+  "domain": "nice_go",
+  "name": "Nice G.O.",
+  "codeowners": ["@IceBotYT"],
+  "config_flow": true,
+  "documentation": "https://www.home-assistant.io/integrations/nice_go",
+  "iot_class": "cloud_push",
+  "loggers": ["nice-go"],
+  "requirements": ["nice-go==0.3.8"]
+}
diff --git a/homeassistant/components/nice_go/strings.json b/homeassistant/components/nice_go/strings.json
new file mode 100644
index 00000000000..30a2bbf58b6
--- /dev/null
+++ b/homeassistant/components/nice_go/strings.json
@@ -0,0 +1,50 @@
+{
+  "config": {
+    "step": {
+      "user": {
+        "data": {
+          "email": "[%key:common::config_flow::data::email%]",
+          "password": "[%key:common::config_flow::data::password%]"
+        }
+      }
+    },
+    "error": {
+      "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]",
+      "unknown": "[%key:common::config_flow::error::unknown%]"
+    },
+    "abort": {
+      "already_configured": "[%key:common::config_flow::abort::already_configured_device%]",
+      "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]"
+    }
+  },
+  "entity": {
+    "light": {
+      "light": {
+        "name": "[%key:component::light::title%]"
+      }
+    },
+    "switch": {
+      "vacation_mode": {
+        "name": "Vacation mode"
+      }
+    },
+    "event": {
+      "barrier_obstructed": {
+        "name": "Barrier obstructed",
+        "state_attributes": {
+          "event_type": {
+            "state": {
+              "barrier_obstructed": "Barrier obstructed"
+            }
+          }
+        }
+      }
+    }
+  },
+  "issues": {
+    "firmware_update_required": {
+      "title": "Firmware update required",
+      "description": "Your device ({device_name}) requires a firmware update on the Nice G.O. app in order to work with this integration. Please update the firmware on the Nice G.O. app and reconfigure this integration."
+    }
+  }
+}
diff --git a/homeassistant/components/nice_go/switch.py b/homeassistant/components/nice_go/switch.py
new file mode 100644
index 00000000000..26d42dab124
--- /dev/null
+++ b/homeassistant/components/nice_go/switch.py
@@ -0,0 +1,49 @@
+"""Nice G.O. switch platform."""
+
+from __future__ import annotations
+
+import logging
+from typing import Any
+
+from homeassistant.components.switch import SwitchDeviceClass, SwitchEntity
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers.entity_platform import AddEntitiesCallback
+
+from . import NiceGOConfigEntry
+from .entity import NiceGOEntity
+
+_LOGGER = logging.getLogger(__name__)
+
+
+async def async_setup_entry(
+    hass: HomeAssistant,
+    config_entry: NiceGOConfigEntry,
+    async_add_entities: AddEntitiesCallback,
+) -> None:
+    """Set up Nice G.O. 
switch.""" + coordinator = config_entry.runtime_data + + async_add_entities( + NiceGOSwitchEntity(coordinator, device_id, device_data.name) + for device_id, device_data in coordinator.data.items() + ) + + +class NiceGOSwitchEntity(NiceGOEntity, SwitchEntity): + """Representation of a Nice G.O. switch.""" + + _attr_device_class = SwitchDeviceClass.SWITCH + _attr_translation_key = "vacation_mode" + + @property + def is_on(self) -> bool: + """Return if switch is on.""" + return self.data.vacation_mode + + async def async_turn_on(self, **kwargs: Any) -> None: + """Turn the switch on.""" + await self.coordinator.api.vacation_mode_on(self.data.id) + + async def async_turn_off(self, **kwargs: Any) -> None: + """Turn the switch off.""" + await self.coordinator.api.vacation_mode_off(self.data.id) diff --git a/homeassistant/components/nissan_leaf/icons.json b/homeassistant/components/nissan_leaf/icons.json index 5da03ed5f1a..832fce90c08 100644 --- a/homeassistant/components/nissan_leaf/icons.json +++ b/homeassistant/components/nissan_leaf/icons.json @@ -1,6 +1,10 @@ { "services": { - "start_charge": "mdi:flash", - "update": "mdi:update" + "start_charge": { + "service": "mdi:flash" + }, + "update": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/nobo_hub/config_flow.py b/homeassistant/components/nobo_hub/config_flow.py index 6fc5bba2c1b..8aed520f21e 100644 --- a/homeassistant/components/nobo_hub/config_flow.py +++ b/homeassistant/components/nobo_hub/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations import socket -from typing import Any +from typing import TYPE_CHECKING, Any from pynobo import nobo import voluptuous as vol @@ -36,10 +36,10 @@ class NoboHubConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" - self._discovered_hubs = None - self._hub = None + self._discovered_hubs: dict[str, Any] | None = None + self._hub: str | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -75,6 +75,9 @@ class NoboHubConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle configuration of a selected discovered device.""" errors = {} + if TYPE_CHECKING: + assert self._discovered_hubs + assert self._hub if user_input is not None: serial_prefix = self._discovered_hubs[self._hub] serial_suffix = user_input["serial_suffix"] diff --git a/homeassistant/components/notify/__init__.py b/homeassistant/components/notify/__init__.py index 1fc7836ecd8..31c7b8e4d70 100644 --- a/homeassistant/components/notify/__init__.py +++ b/homeassistant/components/notify/__init__.py @@ -91,14 +91,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def persistent_notification(service: ServiceCall) -> None: """Send notification via the built-in persistent_notify integration.""" message: Template = service.data[ATTR_MESSAGE] - message.hass = hass check_templates_warn(hass, message) title = None title_tpl: Template | None if title_tpl := service.data.get(ATTR_TITLE): check_templates_warn(hass, title_tpl) - title_tpl.hass = hass title = title_tpl.async_render(parse_result=False) notification_id = None diff --git a/homeassistant/components/notify/icons.json b/homeassistant/components/notify/icons.json index ace8ee0c96b..e5ab34031f7 100644 --- a/homeassistant/components/notify/icons.json +++ b/homeassistant/components/notify/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "notify": "mdi:bell-ring", - "persistent_notification": 
"mdi:bell-badge", - "send_message": "mdi:message-arrow-right" + "notify": { + "service": "mdi:bell-ring" + }, + "persistent_notification": { + "service": "mdi:bell-badge" + }, + "send_message": { + "service": "mdi:message-arrow-right" + } } } diff --git a/homeassistant/components/notify/legacy.py b/homeassistant/components/notify/legacy.py index b3871d858e8..dcb148a99f5 100644 --- a/homeassistant/components/notify/legacy.py +++ b/homeassistant/components/notify/legacy.py @@ -105,7 +105,7 @@ def async_setup_legacy( platform.get_service, hass, p_config, discovery_info ) else: - raise HomeAssistantError("Invalid notify platform.") + raise HomeAssistantError("Invalid notify platform.") # noqa: TRY301 if notify_service is None: # Platforms can decide not to create a service based @@ -259,7 +259,6 @@ class BaseNotificationService: title: Template | None if title := service.data.get(ATTR_TITLE): check_templates_warn(self.hass, title) - title.hass = self.hass kwargs[ATTR_TITLE] = title.async_render(parse_result=False) if self.registered_targets.get(service.service) is not None: @@ -268,7 +267,6 @@ class BaseNotificationService: kwargs[ATTR_TARGET] = service.data.get(ATTR_TARGET) check_templates_warn(self.hass, message) - message.hass = self.hass kwargs[ATTR_MESSAGE] = message.async_render(parse_result=False) kwargs[ATTR_DATA] = service.data.get(ATTR_DATA) diff --git a/homeassistant/components/notion/__init__.py b/homeassistant/components/notion/__init__.py index 1793a0cfd47..00bded5c3a0 100644 --- a/homeassistant/components/notion/__init__.py +++ b/homeassistant/components/notion/__init__.py @@ -14,11 +14,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import ( - config_validation as cv, - device_registry as dr, - entity_registry as er, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import EntityDescription from homeassistant.helpers.update_coordinator import CoordinatorEntity @@ -49,7 +45,6 @@ ATTR_SYSTEM_NAME = "system_name" DEFAULT_SCAN_INTERVAL = timedelta(minutes=1) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) # Define a map of old-API task types to new-API listener types: TASK_TYPE_TO_LISTENER_MAP: dict[str, ListenerKind] = { diff --git a/homeassistant/components/nuheat/__init__.py b/homeassistant/components/nuheat/__init__.py index 8eeee1f3f95..fdb49688eba 100644 --- a/homeassistant/components/nuheat/__init__.py +++ b/homeassistant/components/nuheat/__init__.py @@ -11,15 +11,12 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from .const import CONF_SERIAL_NUMBER, DOMAIN, PLATFORMS _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - def _get_thermostat(api, serial_number): """Authenticate and create the thermostat object.""" diff --git a/homeassistant/components/nuheat/config_flow.py b/homeassistant/components/nuheat/config_flow.py index a5d34f7ae6c..0e090eeab3e 
100644 --- a/homeassistant/components/nuheat/config_flow.py +++ b/homeassistant/components/nuheat/config_flow.py @@ -2,12 +2,13 @@ from http import HTTPStatus import logging +from typing import Any import nuheat import requests.exceptions import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -64,7 +65,9 @@ class NuHeatConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/nuki/config_flow.py b/homeassistant/components/nuki/config_flow.py index 286395e1ff3..3b8015827f1 100644 --- a/homeassistant/components/nuki/config_flow.py +++ b/homeassistant/components/nuki/config_flow.py @@ -62,12 +62,14 @@ async def validate_input(hass, data): class NukiConfigFlow(ConfigFlow, domain=DOMAIN): """Nuki config flow.""" - def __init__(self): + def __init__(self) -> None: """Initialize the Nuki config flow.""" - self.discovery_schema = {} - self._data = {} + self.discovery_schema: vol.Schema | None = None + self._data: Mapping[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" return await self.async_step_validate(user_input) diff --git a/homeassistant/components/nuki/icons.json b/homeassistant/components/nuki/icons.json index f74603cb9dc..ea1ff9c4fed 100644 --- a/homeassistant/components/nuki/icons.json +++ b/homeassistant/components/nuki/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "lock_n_go": "mdi:lock-clock", - "set_continuous_mode": "mdi:bell-cog" + "lock_n_go": { + "service": "mdi:lock-clock" + }, + "set_continuous_mode": { + "service": "mdi:bell-cog" + } } } diff --git a/homeassistant/components/number/const.py b/homeassistant/components/number/const.py index 6343c3a599f..ad95c9b5358 100644 --- a/homeassistant/components/number/const.py +++ b/homeassistant/components/number/const.py @@ -14,7 +14,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, @@ -31,6 +30,7 @@ from homeassistant.const import ( UnitOfPower, UnitOfPrecipitationDepth, UnitOfPressure, + UnitOfReactivePower, UnitOfSoundPressure, UnitOfSpeed, UnitOfTemperature, @@ -468,7 +468,7 @@ DEVICE_CLASS_UNITS: dict[NumberDeviceClass, set[type[StrEnum] | str | None]] = { NumberDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), NumberDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), NumberDeviceClass.PRESSURE: set(UnitOfPressure), - NumberDeviceClass.REACTIVE_POWER: {POWER_VOLT_AMPERE_REACTIVE}, + NumberDeviceClass.REACTIVE_POWER: {UnitOfReactivePower.VOLT_AMPERE_REACTIVE}, NumberDeviceClass.SIGNAL_STRENGTH: { SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, diff --git a/homeassistant/components/number/icons.json b/homeassistant/components/number/icons.json index 2ce22fcaa4a..a122aaecb09 100644 --- a/homeassistant/components/number/icons.json +++ 
b/homeassistant/components/number/icons.json @@ -21,6 +21,9 @@ "carbon_monoxide": { "default": "mdi:molecule-co" }, + "conductivity": { + "default": "mdi:sprout-outline" + }, "current": { "default": "mdi:current-ac" }, @@ -146,6 +149,8 @@ } }, "services": { - "set_value": "mdi:numeric" + "set_value": { + "service": "mdi:numeric" + } } } diff --git a/homeassistant/components/number/strings.json b/homeassistant/components/number/strings.json index d6932286469..580385172e3 100644 --- a/homeassistant/components/number/strings.json +++ b/homeassistant/components/number/strings.json @@ -49,6 +49,9 @@ "carbon_monoxide": { "name": "[%key:component::sensor::entity_component::carbon_monoxide::name%]" }, + "conductivity": { + "name": "[%key:component::sensor::entity_component::conductivity::name%]" + }, "current": { "name": "[%key:component::sensor::entity_component::current::name%]" }, diff --git a/homeassistant/components/nut/__init__.py b/homeassistant/components/nut/__init__.py index 3825db92983..2ce67c76649 100644 --- a/homeassistant/components/nut/__init__.py +++ b/homeassistant/components/nut/__init__.py @@ -172,7 +172,7 @@ def _firmware_from_status(status: dict[str, str]) -> str | None: def _serial_from_status(status: dict[str, str]) -> str | None: - """Find the best serialvalue from the status.""" + """Find the best serial value from the status.""" serial = status.get("device.serial") or status.get("ups.serial") if serial and ( serial.lower() in NUT_FAKE_SERIAL or serial.count("0") == len(serial.strip()) diff --git a/homeassistant/components/nws/icons.json b/homeassistant/components/nws/icons.json index 8f91388a3ef..2aef3a2e614 100644 --- a/homeassistant/components/nws/icons.json +++ b/homeassistant/components/nws/icons.json @@ -1,5 +1,7 @@ { "services": { - "get_forecasts_extra": "mdi:weather-cloudy-clock" + "get_forecasts_extra": { + "service": "mdi:weather-cloudy-clock" + } } } diff --git a/homeassistant/components/nx584/icons.json b/homeassistant/components/nx584/icons.json index 76e5ae82e09..3bd8e485bfd 100644 --- a/homeassistant/components/nx584/icons.json +++ b/homeassistant/components/nx584/icons.json @@ -1,6 +1,10 @@ { "services": { - "bypass_zone": "mdi:wrench", - "unbypass_zone": "mdi:wrench" + "bypass_zone": { + "service": "mdi:wrench" + }, + "unbypass_zone": { + "service": "mdi:wrench" + } } } diff --git a/homeassistant/components/nzbget/__init__.py b/homeassistant/components/nzbget/__init__.py index 61b3f98739c..d47ac78c9d0 100644 --- a/homeassistant/components/nzbget/__init__.py +++ b/homeassistant/components/nzbget/__init__.py @@ -23,7 +23,6 @@ from .coordinator import NZBGetDataUpdateCoordinator PLATFORMS = [Platform.SENSOR, Platform.SWITCH] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) SPEED_LIMIT_SCHEMA = vol.Schema( {vol.Optional(ATTR_SPEED, default=DEFAULT_SPEED_LIMIT): cv.positive_int} diff --git a/homeassistant/components/nzbget/icons.json b/homeassistant/components/nzbget/icons.json index a693e9fec86..ca4f4d584ae 100644 --- a/homeassistant/components/nzbget/icons.json +++ b/homeassistant/components/nzbget/icons.json @@ -1,7 +1,13 @@ { "services": { - "pause": "mdi:pause", - "resume": "mdi:play", - "set_speed": "mdi:speedometer" + "pause": { + "service": "mdi:pause" + }, + "resume": { + "service": "mdi:play" + }, + "set_speed": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/octoprint/config_flow.py b/homeassistant/components/octoprint/config_flow.py index 32f5fa88fff..706670738a6 100644 --- 
a/homeassistant/components/octoprint/config_flow.py +++ b/homeassistant/components/octoprint/config_flow.py @@ -63,7 +63,9 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for OctoPrint.""" self._sessions: list[aiohttp.ClientSession] = [] - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" # When coming back from the progress steps, the user_input is stored in the # instance variable instead of being passed in @@ -158,9 +160,9 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): """Handle api fetch failure.""" return self.async_abort(reason="auth_failed") - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -213,13 +215,15 @@ class OctoPrintConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_user() - async def async_step_reauth(self, config: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle reauthorization request from Octoprint.""" - self._reauth_data = dict(config) + self._reauth_data = dict(entry_data) self.context.update( { - "title_placeholders": {CONF_HOST: config[CONF_HOST]}, + "title_placeholders": {CONF_HOST: entry_data[CONF_HOST]}, } ) diff --git a/homeassistant/components/octoprint/icons.json b/homeassistant/components/octoprint/icons.json index 972ecabb765..720718fcede 100644 --- a/homeassistant/components/octoprint/icons.json +++ b/homeassistant/components/octoprint/icons.json @@ -1,5 +1,7 @@ { "services": { - "printer_connect": "mdi:lan-connect" + "printer_connect": { + "service": "mdi:lan-connect" + } } } diff --git a/homeassistant/components/ollama/__init__.py b/homeassistant/components/ollama/__init__.py index 2286a2c7b75..3bcba567803 100644 --- a/homeassistant/components/ollama/__init__.py +++ b/homeassistant/components/ollama/__init__.py @@ -13,11 +13,13 @@ from homeassistant.const import CONF_URL, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import config_validation as cv +from homeassistant.util.ssl import get_default_context from .const import ( CONF_KEEP_ALIVE, CONF_MAX_HISTORY, CONF_MODEL, + CONF_NUM_CTX, CONF_PROMPT, DEFAULT_TIMEOUT, DOMAIN, @@ -30,6 +32,7 @@ __all__ = [ "CONF_PROMPT", "CONF_MODEL", "CONF_MAX_HISTORY", + "CONF_NUM_CTX", "CONF_KEEP_ALIVE", "DOMAIN", ] @@ -41,7 +44,7 @@ PLATFORMS = (Platform.CONVERSATION,) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Ollama from a config entry.""" settings = {**entry.data, **entry.options} - client = ollama.AsyncClient(host=settings[CONF_URL]) + client = ollama.AsyncClient(host=settings[CONF_URL], verify=get_default_context()) try: async with asyncio.timeout(DEFAULT_TIMEOUT): await client.list() diff --git a/homeassistant/components/ollama/config_flow.py b/homeassistant/components/ollama/config_flow.py index bcdd6e06f48..65b8efaf525 100644 --- a/homeassistant/components/ollama/config_flow.py +++ b/homeassistant/components/ollama/config_flow.py @@ -33,17 +33,22 @@ from homeassistant.helpers.selector import ( TextSelectorConfig, 
TextSelectorType, ) +from homeassistant.util.ssl import get_default_context from .const import ( CONF_KEEP_ALIVE, CONF_MAX_HISTORY, CONF_MODEL, + CONF_NUM_CTX, CONF_PROMPT, DEFAULT_KEEP_ALIVE, DEFAULT_MAX_HISTORY, DEFAULT_MODEL, + DEFAULT_NUM_CTX, DEFAULT_TIMEOUT, DOMAIN, + MAX_NUM_CTX, + MIN_NUM_CTX, MODEL_NAMES, ) @@ -87,7 +92,9 @@ class OllamaConfigFlow(ConfigFlow, domain=DOMAIN): errors = {} try: - self.client = ollama.AsyncClient(host=self.url) + self.client = ollama.AsyncClient( + host=self.url, verify=get_default_context() + ) async with asyncio.timeout(DEFAULT_TIMEOUT): response = await self.client.list() @@ -255,6 +262,14 @@ def ollama_config_option_schema( description={"suggested_value": options.get(CONF_LLM_HASS_API)}, default="none", ): SelectSelector(SelectSelectorConfig(options=hass_apis)), + vol.Optional( + CONF_NUM_CTX, + description={"suggested_value": options.get(CONF_NUM_CTX, DEFAULT_NUM_CTX)}, + ): NumberSelector( + NumberSelectorConfig( + min=MIN_NUM_CTX, max=MAX_NUM_CTX, step=1, mode=NumberSelectorMode.BOX + ) + ), vol.Optional( CONF_MAX_HISTORY, description={ diff --git a/homeassistant/components/ollama/const.py b/homeassistant/components/ollama/const.py index 97c4f1186fc..6152b223d6d 100644 --- a/homeassistant/components/ollama/const.py +++ b/homeassistant/components/ollama/const.py @@ -11,6 +11,11 @@ DEFAULT_KEEP_ALIVE = -1 # seconds. -1 = indefinite, 0 = never KEEP_ALIVE_FOREVER = -1 DEFAULT_TIMEOUT = 5.0 # seconds +CONF_NUM_CTX = "num_ctx" +DEFAULT_NUM_CTX = 8192 +MIN_NUM_CTX = 2048 +MAX_NUM_CTX = 131072 + CONF_MAX_HISTORY = "max_history" DEFAULT_MAX_HISTORY = 20 diff --git a/homeassistant/components/ollama/conversation.py b/homeassistant/components/ollama/conversation.py index c0423b258f0..1a91c790d27 100644 --- a/homeassistant/components/ollama/conversation.py +++ b/homeassistant/components/ollama/conversation.py @@ -26,9 +26,11 @@ from .const import ( CONF_KEEP_ALIVE, CONF_MAX_HISTORY, CONF_MODEL, + CONF_NUM_CTX, CONF_PROMPT, DEFAULT_KEEP_ALIVE, DEFAULT_MAX_HISTORY, + DEFAULT_NUM_CTX, DOMAIN, MAX_HISTORY_SECONDS, ) @@ -263,6 +265,7 @@ class OllamaConversationEntity( stream=False, # keep_alive requires specifying unit. In this case, seconds keep_alive=f"{settings.get(CONF_KEEP_ALIVE, DEFAULT_KEEP_ALIVE)}s", + options={CONF_NUM_CTX: settings.get(CONF_NUM_CTX, DEFAULT_NUM_CTX)}, ) except (ollama.RequestError, ollama.ResponseError) as err: _LOGGER.error("Unexpected error talking to Ollama server: %s", err) diff --git a/homeassistant/components/ollama/strings.json b/homeassistant/components/ollama/strings.json index 2366ecd0848..c307f160228 100644 --- a/homeassistant/components/ollama/strings.json +++ b/homeassistant/components/ollama/strings.json @@ -27,11 +27,13 @@ "prompt": "Instructions", "llm_hass_api": "[%key:common::config_flow::data::llm_hass_api%]", "max_history": "Max history messages", + "num_ctx": "Context window size", "keep_alive": "Keep alive" }, "data_description": { "prompt": "Instruct how the LLM should respond. This can be a template.", - "keep_alive": "Duration in seconds for Ollama to keep model in memory. -1 = indefinite, 0 = never." + "keep_alive": "Duration in seconds for Ollama to keep model in memory. -1 = indefinite, 0 = never.", + "num_ctx": "Maximum number of text tokens the model can process. Lower to reduce Ollama RAM, or increase for a large number of exposed entities." 
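For reference, the new num_ctx option in the Ollama hunks above travels from the options form into the model options of the chat request, alongside the existing keep_alive handling and the TLS context now passed to ollama.AsyncClient. A minimal standalone sketch of that call pattern, assuming the ollama Python client's AsyncClient.chat API; the host, model, and message values are illustrative, and ssl.create_default_context() stands in for Home Assistant's get_default_context() helper used in the diff:

    import asyncio
    import ssl

    import ollama


    async def main() -> None:
        # verify= takes an SSL context (the integration passes get_default_context()).
        client = ollama.AsyncClient(
            host="http://localhost:11434", verify=ssl.create_default_context()
        )
        response = await client.chat(
            model="llama3.1",  # illustrative model name
            messages=[{"role": "user", "content": "hello"}],
            stream=False,
            keep_alive="-1s",  # seconds suffix, matching the integration's formatting
            options={"num_ctx": 8192},  # bounded to 2048..131072 by the options flow
        )
        print(response)


    asyncio.run(main())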
} } } diff --git a/homeassistant/components/ombi/icons.json b/homeassistant/components/ombi/icons.json index 4b3e32a1e13..15b8af56188 100644 --- a/homeassistant/components/ombi/icons.json +++ b/homeassistant/components/ombi/icons.json @@ -1,7 +1,13 @@ { "services": { - "submit_movie_request": "mdi:movie-roll", - "submit_tv_request": "mdi:television-classic", - "submit_music_request": "mdi:music" + "submit_movie_request": { + "service": "mdi:movie-roll" + }, + "submit_tv_request": { + "service": "mdi:television-classic" + }, + "submit_music_request": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/omnilogic/config_flow.py b/homeassistant/components/omnilogic/config_flow.py index 229f458ceb4..166e4414767 100644 --- a/homeassistant/components/omnilogic/config_flow.py +++ b/homeassistant/components/omnilogic/config_flow.py @@ -3,11 +3,17 @@ from __future__ import annotations import logging +from typing import Any from omnilogic import LoginException, OmniLogic, OmniLogicException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback from homeassistant.helpers import aiohttp_client @@ -30,9 +36,11 @@ class OmniLogicConfigFlow(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OptionsFlowHandler(config_entry) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} + errors: dict[str, str] = {} config_entry = self._async_current_entries() if config_entry: diff --git a/homeassistant/components/omnilogic/icons.json b/homeassistant/components/omnilogic/icons.json index ee5b5102177..8f0f13fe652 100644 --- a/homeassistant/components/omnilogic/icons.json +++ b/homeassistant/components/omnilogic/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_pump_speed": "mdi:water-pump" + "set_pump_speed": { + "service": "mdi:water-pump" + } } } diff --git a/homeassistant/components/onkyo/media_player.py b/homeassistant/components/onkyo/media_player.py index 63e76e28dbb..8d8f4d3bfd5 100644 --- a/homeassistant/components/onkyo/media_player.py +++ b/homeassistant/components/onkyo/media_player.py @@ -3,6 +3,7 @@ from __future__ import annotations import asyncio +from dataclasses import dataclass import logging from typing import Any @@ -10,7 +11,7 @@ import pyeiscp import voluptuous as vol from homeassistant.components.media_player import ( - DOMAIN, + DOMAIN as MEDIA_PLAYER_DOMAIN, PLATFORM_SCHEMA as MEDIA_PLAYER_PLATFORM_SCHEMA, MediaPlayerEntity, MediaPlayerEntityFeature, @@ -27,9 +28,14 @@ from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType +from homeassistant.util.hass_dict import HassKey _LOGGER = logging.getLogger(__name__) +DOMAIN = "onkyo" + +DATA_MP_ENTITIES: HassKey[list[dict[str, OnkyoMediaPlayer]]] = HassKey(DOMAIN) + CONF_SOURCES = "sources" CONF_MAX_VOLUME = "max_volume" CONF_RECEIVER_MAX_VOLUME = "receiver_max_volume" @@ -137,6 +143,43 @@ ONKYO_SELECT_OUTPUT_SCHEMA = vol.Schema( SERVICE_SELECT_HDMI_OUTPUT = "onkyo_select_hdmi_output" 
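# --- Editor's note: illustrative sketch, not part of the diff above or below. ---
# The hunk that follows introduces the HassKey idiom: a typed key for `hass.data`,
# so static type checkers know what is stored under DATA_MP_ENTITIES instead of
# treating the lookup as Any. A minimal, self-contained example of the same idiom
# (MY_KEY, "my_domain" and remember() below are hypothetical names, not from this PR):
#
#     from homeassistant.core import HomeAssistant
#     from homeassistant.util.hass_dict import HassKey
#
#     MY_KEY: HassKey[list[str]] = HassKey("my_domain")
#
#     def remember(hass: HomeAssistant, value: str) -> None:
#         # For a HassKey[list[str]], both setdefault() and indexing are typed
#         # as list[str], rather than the untyped value a plain str key yields.
#         hass.data.setdefault(MY_KEY, []).append(value)
#
# The diff applies the same pattern with
# DATA_MP_ENTITIES: HassKey[list[dict[str, OnkyoMediaPlayer]]] defined above and
# the hass.data.setdefault(DATA_MP_ENTITIES, []) call in async_setup_platform below.
# --- End editor's note. ---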
+@dataclass +class ReceiverInfo: + """Onkyo Receiver information.""" + + host: str + port: int + model_name: str + identifier: str + + +async def async_register_services(hass: HomeAssistant) -> None: + """Register Onkyo services.""" + + async def async_service_handle(service: ServiceCall) -> None: + """Handle for services.""" + entity_ids = service.data[ATTR_ENTITY_ID] + + targets: list[OnkyoMediaPlayer] = [] + for receiver_entities in hass.data[DATA_MP_ENTITIES]: + targets.extend( + entity + for entity in receiver_entities.values() + if entity.entity_id in entity_ids + ) + + for target in targets: + if service.service == SERVICE_SELECT_HDMI_OUTPUT: + await target.async_select_output(service.data[ATTR_HDMI_OUTPUT]) + + hass.services.async_register( + MEDIA_PLAYER_DOMAIN, + SERVICE_SELECT_HDMI_OUTPUT, + async_service_handle, + schema=ONKYO_SELECT_OUTPUT_SCHEMA, + ) + + async def async_setup_platform( hass: HomeAssistant, config: ConfigType, @@ -144,72 +187,73 @@ async def async_setup_platform( discovery_info: DiscoveryInfoType | None = None, ) -> None: """Set up the Onkyo platform.""" + await async_register_services(hass) + receivers: dict[str, pyeiscp.Connection] = {} # indexed by host - entities: dict[str, dict[str, OnkyoMediaPlayer]] = {} # indexed by host and zone - - async def async_service_handle(service: ServiceCall) -> None: - """Handle for services.""" - entity_ids = service.data[ATTR_ENTITY_ID] - targets = [ - entity - for h in entities.values() - for entity in h.values() - if entity.entity_id in entity_ids - ] - - for target in targets: - if service.service == SERVICE_SELECT_HDMI_OUTPUT: - await target.async_select_output(service.data[ATTR_HDMI_OUTPUT]) - - hass.services.async_register( - DOMAIN, - SERVICE_SELECT_HDMI_OUTPUT, - async_service_handle, - schema=ONKYO_SELECT_OUTPUT_SCHEMA, - ) + all_entities = hass.data.setdefault(DATA_MP_ENTITIES, []) host = config.get(CONF_HOST) - name = config[CONF_NAME] + name = config.get(CONF_NAME) max_volume = config[CONF_MAX_VOLUME] receiver_max_volume = config[CONF_RECEIVER_MAX_VOLUME] sources = config[CONF_SOURCES] - @callback - def async_onkyo_update_callback(message: tuple[str, str, Any], origin: str) -> None: - """Process new message from receiver.""" - receiver = receivers[origin] - _LOGGER.debug("Received update callback from %s: %s", receiver.name, message) + async def async_setup_receiver( + info: ReceiverInfo, discovered: bool, name: str | None + ) -> None: + entities: dict[str, OnkyoMediaPlayer] = {} + all_entities.append(entities) - zone, _, value = message - entity = entities[origin].get(zone) - if entity is not None: - if entity.enabled: - entity.process_update(message) - elif zone in ZONES and value != "N/A": - # When we receive the status for a zone, and the value is not "N/A", - # then zone is available on the receiver, so we create the entity for it. 
- _LOGGER.debug("Discovered %s on %s", ZONES[zone], receiver.name) - zone_entity = OnkyoMediaPlayer( - receiver, sources, zone, max_volume, receiver_max_volume + @callback + def async_onkyo_update_callback( + message: tuple[str, str, Any], origin: str + ) -> None: + """Process new message from receiver.""" + receiver = receivers[origin] + _LOGGER.debug( + "Received update callback from %s: %s", receiver.name, message ) - entities[origin][zone] = zone_entity - async_add_entities([zone_entity]) - @callback - def async_onkyo_connect_callback(origin: str) -> None: - """Receiver (re)connected.""" - receiver = receivers[origin] - _LOGGER.debug("Receiver (re)connected: %s (%s)", receiver.name, receiver.host) + zone, _, value = message + entity = entities.get(zone) + if entity is not None: + if entity.enabled: + entity.process_update(message) + elif zone in ZONES and value != "N/A": + # When we receive the status for a zone, and the value is not "N/A", + # then zone is available on the receiver, so we create the entity for it. + _LOGGER.debug("Discovered %s on %s", ZONES[zone], receiver.name) + zone_entity = OnkyoMediaPlayer( + receiver, sources, zone, max_volume, receiver_max_volume + ) + entities[zone] = zone_entity + async_add_entities([zone_entity]) - for entity in entities[origin].values(): - entity.backfill_state() + @callback + def async_onkyo_connect_callback(origin: str) -> None: + """Receiver (re)connected.""" + receiver = receivers[origin] + _LOGGER.debug( + "Receiver (re)connected: %s (%s)", receiver.name, receiver.host + ) - def setup_receiver(receiver: pyeiscp.Connection) -> None: - KNOWN_HOSTS.append(receiver.host) + for entity in entities.values(): + entity.backfill_state() + + _LOGGER.debug("Creating receiver: %s (%s)", info.model_name, info.host) + receiver = await pyeiscp.Connection.create( + host=info.host, + port=info.port, + update_callback=async_onkyo_update_callback, + connect_callback=async_onkyo_connect_callback, + ) + + receiver.model_name = info.model_name + receiver.identifier = info.identifier + receiver.name = name or info.model_name + receiver.discovered = discovered - # Store the receiver object and create a dictionary to store its entities. receivers[receiver.host] = receiver - entities[receiver.host] = {} # Discover what zones are available for the receiver by querying the power. # If we get a response for the specific zone, it means it is available. @@ -221,37 +265,41 @@ async def async_setup_platform( main_entity = OnkyoMediaPlayer( receiver, sources, "main", max_volume, receiver_max_volume ) - entities[receiver.host]["main"] = main_entity + entities["main"] = main_entity async_add_entities([main_entity]) - if host is not None and host not in KNOWN_HOSTS: + if host is not None: + if host in KNOWN_HOSTS: + return + _LOGGER.debug("Manually creating receiver: %s (%s)", name, host) - receiver = await pyeiscp.Connection.create( - host=host, - update_callback=async_onkyo_update_callback, - connect_callback=async_onkyo_connect_callback, - ) - - # The library automatically adds a name and identifier only on discovered hosts, - # so manually add them here instead. 
- receiver.name = name - receiver.identifier = None - - setup_receiver(receiver) - else: @callback - async def async_onkyo_discovery_callback(receiver: pyeiscp.Connection): - """Receiver discovered, connection not yet active.""" - _LOGGER.debug("Receiver discovered: %s (%s)", receiver.name, receiver.host) - if receiver.host not in KNOWN_HOSTS: - await receiver.connect() - setup_receiver(receiver) + async def async_onkyo_interview_callback(conn: pyeiscp.Connection): + """Receiver interviewed, connection not yet active.""" + info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) + _LOGGER.debug("Receiver interviewed: %s (%s)", info.model_name, info.host) + if info.host not in KNOWN_HOSTS: + KNOWN_HOSTS.append(info.host) + await async_setup_receiver(info, False, name) + + await pyeiscp.Connection.discover( + host=host, + discovery_callback=async_onkyo_interview_callback, + ) + else: + _LOGGER.debug("Discovering receivers") + + @callback + async def async_onkyo_discovery_callback(conn: pyeiscp.Connection): + """Receiver discovered, connection not yet active.""" + info = ReceiverInfo(conn.host, conn.port, conn.name, conn.identifier) + _LOGGER.debug("Receiver discovered: %s (%s)", info.model_name, info.host) + if info.host not in KNOWN_HOSTS: + KNOWN_HOSTS.append(info.host) + await async_setup_receiver(info, True, None) - _LOGGER.debug("Discovering receivers") await pyeiscp.Connection.discover( - update_callback=async_onkyo_update_callback, - connect_callback=async_onkyo_connect_callback, discovery_callback=async_onkyo_discovery_callback, ) @@ -279,29 +327,24 @@ class OnkyoMediaPlayer(MediaPlayerEntity): sources: dict[str, str], zone: str, max_volume: int, - receiver_max_volume: int, + volume_resolution: int, ) -> None: """Initialize the Onkyo Receiver.""" self._receiver = receiver name = receiver.name - self._attr_name = f"{name}{' ' + ZONES[zone] if zone != 'main' else ''}" identifier = receiver.identifier - if identifier is not None: - # discovered - if zone == "main": - # keep legacy unique_id - self._attr_unique_id = f"{name}_{identifier}" - else: - self._attr_unique_id = f"{identifier}_{zone}" + self._attr_name = f"{name}{' ' + ZONES[zone] if zone != 'main' else ''}" + if receiver.discovered and zone == "main": + # keep legacy unique_id + self._attr_unique_id = f"{name}_{identifier}" else: - # not discovered - self._attr_unique_id = None + self._attr_unique_id = f"{identifier}_{zone}" self._zone = zone self._source_mapping = sources self._reverse_mapping = {value: key for key, value in sources.items()} self._max_volume = max_volume - self._receiver_max_volume = receiver_max_volume + self._volume_resolution = volume_resolution self._attr_source_list = list(sources.values()) self._attr_extra_state_attributes = {} @@ -350,9 +393,9 @@ class OnkyoMediaPlayer(MediaPlayerEntity): will give 80% volume on the receiver. Then we convert that to the correct scale for the receiver. 
""" - # HA_VOL * (MAX VOL / 100) * MAX_RECEIVER_VOL + # HA_VOL * (MAX VOL / 100) * VOL_RESOLUTION self._update_receiver( - "volume", int(volume * (self._max_volume / 100) * self._receiver_max_volume) + "volume", int(volume * (self._max_volume / 100) * self._volume_resolution) ) async def async_volume_up(self) -> None: @@ -430,9 +473,9 @@ class OnkyoMediaPlayer(MediaPlayerEntity): self._attr_extra_state_attributes.pop(ATTR_VIDEO_OUT, None) elif command in ["volume", "master-volume"] and value != "N/A": self._supports_volume = True - # AMP_VOL / (MAX_RECEIVER_VOL * (MAX_VOL / 100)) + # AMP_VOL / (VOL_RESOLUTION * (MAX_VOL / 100)) self._attr_volume_level = value / ( - self._receiver_max_volume * self._max_volume / 100 + self._volume_resolution * self._max_volume / 100 ) elif command in ["muting", "audio-muting"]: self._attr_is_volume_muted = bool(value == "on") diff --git a/homeassistant/components/onvif/config_flow.py b/homeassistant/components/onvif/config_flow.py index 36ae0e1bf18..30184d1abc3 100644 --- a/homeassistant/components/onvif/config_flow.py +++ b/homeassistant/components/onvif/config_flow.py @@ -112,13 +112,15 @@ class OnvifFlowHandler(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return OnvifOptionsFlowHandler(config_entry) - def __init__(self): + def __init__(self) -> None: """Initialize the ONVIF config flow.""" self.device_id = None - self.devices = [] - self.onvif_config = {} + self.devices: list[dict[str, Any]] = [] + self.onvif_config: dict[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user flow.""" if user_input: if user_input["auto"]: diff --git a/homeassistant/components/onvif/icons.json b/homeassistant/components/onvif/icons.json index 4db9a9f9e49..d42985d34e8 100644 --- a/homeassistant/components/onvif/icons.json +++ b/homeassistant/components/onvif/icons.json @@ -13,6 +13,8 @@ } }, "services": { - "ptz": "mdi:pan" + "ptz": { + "service": "mdi:pan" + } } } diff --git a/homeassistant/components/openai_conversation/__init__.py b/homeassistant/components/openai_conversation/__init__.py index 75b5db23094..0fbda9b7f4a 100644 --- a/homeassistant/components/openai_conversation/__init__.py +++ b/homeassistant/components/openai_conversation/__init__.py @@ -19,6 +19,7 @@ from homeassistant.exceptions import ( ServiceValidationError, ) from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers.httpx_client import get_async_client from homeassistant.helpers.typing import ConfigType from .const import DOMAIN, LOGGER @@ -88,7 +89,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def async_setup_entry(hass: HomeAssistant, entry: OpenAIConfigEntry) -> bool: """Set up OpenAI Conversation from a config entry.""" - client = openai.AsyncOpenAI(api_key=entry.data[CONF_API_KEY]) + client = openai.AsyncOpenAI( + api_key=entry.data[CONF_API_KEY], + http_client=get_async_client(hass), + ) + + # Cache current platform data which gets added to each request (caching done by library) + _ = await hass.async_add_executor_job(client.platform_headers) + try: await hass.async_add_executor_job(client.with_options(timeout=10.0).models.list) except openai.AuthenticationError as err: diff --git a/homeassistant/components/openai_conversation/icons.json b/homeassistant/components/openai_conversation/icons.json index 7f736a5ff3b..3abecd640d1 100644 --- 
a/homeassistant/components/openai_conversation/icons.json +++ b/homeassistant/components/openai_conversation/icons.json @@ -1,5 +1,7 @@ { "services": { - "generate_image": "mdi:image-sync" + "generate_image": { + "service": "mdi:image-sync" + } } } diff --git a/homeassistant/components/openhome/icons.json b/homeassistant/components/openhome/icons.json index 081e97c3489..d75659f17da 100644 --- a/homeassistant/components/openhome/icons.json +++ b/homeassistant/components/openhome/icons.json @@ -1,5 +1,7 @@ { "services": { - "invoke_pin": "mdi:alarm-panel" + "invoke_pin": { + "service": "mdi:alarm-panel" + } } } diff --git a/homeassistant/components/opentherm_gw/__init__.py b/homeassistant/components/opentherm_gw/__init__.py index a0d791fddd4..30410f73c2d 100644 --- a/homeassistant/components/opentherm_gw/__init__.py +++ b/homeassistant/components/opentherm_gw/__init__.py @@ -99,7 +99,7 @@ async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> b if DATA_OPENTHERM_GW not in hass.data: hass.data[DATA_OPENTHERM_GW] = {DATA_GATEWAYS: {}} - gateway = OpenThermGatewayDevice(hass, config_entry) + gateway = OpenThermGatewayHub(hass, config_entry) hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] = gateway if config_entry.options.get(CONF_PRECISION): @@ -273,9 +273,9 @@ def register_services(hass: HomeAssistant) -> None: async def reset_gateway(call: ServiceCall) -> None: """Reset the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] mode_rst = gw_vars.OTGW_MODE_RESET - await gw_dev.gateway.set_mode(mode_rst) + await gw_hub.gateway.set_mode(mode_rst) hass.services.async_register( DOMAIN, SERVICE_RESET_GATEWAY, reset_gateway, service_reset_schema @@ -283,8 +283,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_ch_ovrd(call: ServiceCall) -> None: """Set the central heating override on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_ch_enable_bit(1 if call.data[ATTR_CH_OVRD] else 0) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_ch_enable_bit(1 if call.data[ATTR_CH_OVRD] else 0) hass.services.async_register( DOMAIN, @@ -295,8 +295,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_control_setpoint(call: ServiceCall) -> None: """Set the control setpoint on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_control_setpoint(call.data[ATTR_TEMPERATURE]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_control_setpoint(call.data[ATTR_TEMPERATURE]) hass.services.async_register( DOMAIN, @@ -307,8 +307,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_dhw_ovrd(call: ServiceCall) -> None: """Set the domestic hot water override on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_hot_water_ovrd(call.data[ATTR_DHW_OVRD]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_hot_water_ovrd(call.data[ATTR_DHW_OVRD]) hass.services.async_register( DOMAIN, @@ -319,8 +319,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_dhw_setpoint(call: ServiceCall) -> None: 
"""Set the domestic hot water setpoint on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_dhw_setpoint(call.data[ATTR_TEMPERATURE]) hass.services.async_register( DOMAIN, @@ -331,10 +331,10 @@ def register_services(hass: HomeAssistant) -> None: async def set_device_clock(call: ServiceCall) -> None: """Set the clock on the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] attr_date = call.data[ATTR_DATE] attr_time = call.data[ATTR_TIME] - await gw_dev.gateway.set_clock(datetime.combine(attr_date, attr_time)) + await gw_hub.gateway.set_clock(datetime.combine(attr_date, attr_time)) hass.services.async_register( DOMAIN, SERVICE_SET_CLOCK, set_device_clock, service_set_clock_schema @@ -342,10 +342,10 @@ def register_services(hass: HomeAssistant) -> None: async def set_gpio_mode(call: ServiceCall) -> None: """Set the OpenTherm Gateway GPIO modes.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] gpio_id = call.data[ATTR_ID] gpio_mode = call.data[ATTR_MODE] - await gw_dev.gateway.set_gpio_mode(gpio_id, gpio_mode) + await gw_hub.gateway.set_gpio_mode(gpio_id, gpio_mode) hass.services.async_register( DOMAIN, SERVICE_SET_GPIO_MODE, set_gpio_mode, service_set_gpio_mode_schema @@ -353,10 +353,10 @@ def register_services(hass: HomeAssistant) -> None: async def set_led_mode(call: ServiceCall) -> None: """Set the OpenTherm Gateway LED modes.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] led_id = call.data[ATTR_ID] led_mode = call.data[ATTR_MODE] - await gw_dev.gateway.set_led_mode(led_id, led_mode) + await gw_hub.gateway.set_led_mode(led_id, led_mode) hass.services.async_register( DOMAIN, SERVICE_SET_LED_MODE, set_led_mode, service_set_led_mode_schema @@ -364,12 +364,12 @@ def register_services(hass: HomeAssistant) -> None: async def set_max_mod(call: ServiceCall) -> None: """Set the max modulation level.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] level = call.data[ATTR_LEVEL] if level == -1: # Backend only clears setting on non-numeric values. 
level = "-" - await gw_dev.gateway.set_max_relative_mod(level) + await gw_hub.gateway.set_max_relative_mod(level) hass.services.async_register( DOMAIN, SERVICE_SET_MAX_MOD, set_max_mod, service_set_max_mod_schema @@ -377,8 +377,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_outside_temp(call: ServiceCall) -> None: """Provide the outside temperature to the OpenTherm Gateway.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_outside_temp(call.data[ATTR_TEMPERATURE]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_outside_temp(call.data[ATTR_TEMPERATURE]) hass.services.async_register( DOMAIN, SERVICE_SET_OAT, set_outside_temp, service_set_oat_schema @@ -386,8 +386,8 @@ def register_services(hass: HomeAssistant) -> None: async def set_setback_temp(call: ServiceCall) -> None: """Set the OpenTherm Gateway SetBack temperature.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] - await gw_dev.gateway.set_setback_temp(call.data[ATTR_TEMPERATURE]) + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + await gw_hub.gateway.set_setback_temp(call.data[ATTR_TEMPERATURE]) hass.services.async_register( DOMAIN, SERVICE_SET_SB_TEMP, set_setback_temp, service_set_sb_temp_schema @@ -395,10 +395,10 @@ def register_services(hass: HomeAssistant) -> None: async def send_transparent_cmd(call: ServiceCall) -> None: """Send a transparent OpenTherm Gateway command.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][call.data[ATTR_GW_ID]] transp_cmd = call.data[ATTR_TRANSP_CMD] transp_arg = call.data[ATTR_TRANSP_ARG] - await gw_dev.gateway.send_transparent_command(transp_cmd, transp_arg) + await gw_hub.gateway.send_transparent_command(transp_cmd, transp_arg) hass.services.async_register( DOMAIN, @@ -416,20 +416,20 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: return unload_ok -class OpenThermGatewayDevice: - """OpenTherm Gateway device class.""" +class OpenThermGatewayHub: + """OpenTherm Gateway hub class.""" def __init__(self, hass: HomeAssistant, config_entry: ConfigEntry) -> None: """Initialize the OpenTherm Gateway.""" self.hass = hass self.device_path = config_entry.data[CONF_DEVICE] - self.gw_id = config_entry.data[CONF_ID] + self.hub_id = config_entry.data[CONF_ID] self.name = config_entry.data[CONF_NAME] self.climate_config = config_entry.options self.config_entry_id = config_entry.entry_id self.status = gw_vars.DEFAULT_STATUS - self.update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_update" - self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.gw_id}_options_update" + self.update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_update" + self.options_update_signal = f"{DATA_OPENTHERM_GW}_{self.hub_id}_options_update" self.gateway = pyotgw.OpenThermGateway() self.gw_version = None @@ -453,7 +453,7 @@ class OpenThermGatewayDevice: dev_reg = dr.async_get(self.hass) gw_dev = dev_reg.async_get_or_create( config_entry_id=self.config_entry_id, - identifiers={(DOMAIN, self.gw_id)}, + identifiers={(DOMAIN, self.hub_id)}, name=self.name, manufacturer="Schelte Bron", model="OpenTherm Gateway", diff --git a/homeassistant/components/opentherm_gw/binary_sensor.py b/homeassistant/components/opentherm_gw/binary_sensor.py index 7c3760653e8..f978a2695d7 100644 --- 
a/homeassistant/components/opentherm_gw/binary_sensor.py +++ b/homeassistant/components/opentherm_gw/binary_sensor.py @@ -1,25 +1,287 @@ """Support for OpenTherm Gateway binary sensors.""" -import logging +from dataclasses import dataclass -from homeassistant.components.binary_sensor import ENTITY_ID_FORMAT, BinarySensorEntity +from pyotgw import vars as gw_vars + +from homeassistant.components.binary_sensor import ( + ENTITY_ID_FORMAT, + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers.device_registry import DeviceInfo -from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.helpers.entity import async_generate_entity_id from homeassistant.helpers.entity_platform import AddEntitiesCallback -from . import DOMAIN -from .const import ( - BINARY_SENSOR_INFO, - DATA_GATEWAYS, - DATA_OPENTHERM_GW, - TRANSLATE_SOURCE, -) +from . import OpenThermGatewayHub +from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW +from .entity import OpenThermEntity, OpenThermEntityDescription -_LOGGER = logging.getLogger(__name__) + +@dataclass(frozen=True, kw_only=True) +class OpenThermBinarySensorEntityDescription( + BinarySensorEntityDescription, OpenThermEntityDescription +): + """Describes opentherm_gw binary sensor entity.""" + + +BINARY_SENSOR_INFO: tuple[ + tuple[list[str], OpenThermBinarySensorEntityDescription], ... +] = ( + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH_ENABLED, + friendly_name_format="Thermostat Central Heating {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_DHW_ENABLED, + friendly_name_format="Thermostat Hot Water {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_COOLING_ENABLED, + friendly_name_format="Thermostat Cooling {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_OTC_ENABLED, + friendly_name_format="Thermostat Outside Temperature Correction {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_MASTER_CH2_ENABLED, + friendly_name_format="Thermostat Central Heating 2 {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FAULT_IND, + friendly_name_format="Boiler Fault {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_ACTIVE, + friendly_name_format="Boiler Central Heating {}", + device_class=BinarySensorDeviceClass.HEAT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_ACTIVE, + friendly_name_format="Boiler Hot Water {}", + device_class=BinarySensorDeviceClass.HEAT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_FLAME_ON, + friendly_name_format="Boiler Flame {}", + device_class=BinarySensorDeviceClass.HEAT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_ACTIVE, + 
friendly_name_format="Boiler Cooling {}", + device_class=BinarySensorDeviceClass.COLD, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_ACTIVE, + friendly_name_format="Boiler Central Heating 2 {}", + device_class=BinarySensorDeviceClass.HEAT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DIAG_IND, + friendly_name_format="Boiler Diagnostics {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_PRESENT, + friendly_name_format="Boiler Hot Water Present {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CONTROL_TYPE, + friendly_name_format="Boiler Control Type {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_COOLING_SUPPORTED, + friendly_name_format="Boiler Cooling Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_CONFIG, + friendly_name_format="Boiler Hot Water Configuration {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP, + friendly_name_format="Boiler Pump Commands Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH2_PRESENT, + friendly_name_format="Boiler Central Heating 2 Present {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_SERVICE_REQ, + friendly_name_format="Boiler Service Required {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_REMOTE_RESET, + friendly_name_format="Boiler Remote Reset Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_LOW_WATER_PRESS, + friendly_name_format="Boiler Low Water Pressure {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_GAS_FAULT, + friendly_name_format="Boiler Gas Fault {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_AIR_PRESS_FAULT, + friendly_name_format="Boiler Air Pressure Fault {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_SLAVE_WATER_OVERTEMP, + friendly_name_format="Boiler Water Overtemperature {}", + device_class=BinarySensorDeviceClass.PROBLEM, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_DHW, + friendly_name_format="Remote Hot Water Setpoint Transfer Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_TRANSFER_MAX_CH, + friendly_name_format="Remote Maximum Central Heating Setpoint Write Support {}", + ), + ), + ( + [gw_vars.BOILER, 
gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_DHW, + friendly_name_format="Remote Hot Water Setpoint Write Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_REMOTE_RW_MAX_CH, + friendly_name_format="Remote Central Heating Setpoint Write Support {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_MAN_PRIO, + friendly_name_format="Remote Override Manual Change Priority {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermBinarySensorEntityDescription( + key=gw_vars.DATA_ROVRD_AUTO_PRIO, + friendly_name_format="Remote Override Program Change Priority {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_GPIO_A_STATE, + friendly_name_format="Gateway GPIO A {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_GPIO_B_STATE, + friendly_name_format="Gateway GPIO B {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_IGNORE_TRANSITIONS, + friendly_name_format="Gateway Ignore Transitions {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermBinarySensorEntityDescription( + key=gw_vars.OTGW_OVRD_HB, + friendly_name_format="Gateway Override High Byte {}", + ), + ), +) async def async_setup_entry( @@ -28,68 +290,38 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the OpenTherm Gateway binary sensors.""" - gw_dev = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] async_add_entities( - OpenThermBinarySensor( - gw_dev, - var, - source, - info[0], - info[1], - ) - for var, info in BINARY_SENSOR_INFO.items() - for source in info[2] + OpenThermBinarySensor(gw_hub, source, description) + for sources, description in BINARY_SENSOR_INFO + for source in sources ) -class OpenThermBinarySensor(BinarySensorEntity): +class OpenThermBinarySensor(OpenThermEntity, BinarySensorEntity): """Represent an OpenTherm Gateway binary sensor.""" - _attr_should_poll = False - _attr_entity_registry_enabled_default = False - _attr_available = False + entity_description: OpenThermBinarySensorEntityDescription - def __init__(self, gw_dev, var, source, device_class, friendly_name_format): + def __init__( + self, + gw_hub: OpenThermGatewayHub, + source: str, + description: OpenThermBinarySensorEntityDescription, + ) -> None: """Initialize the binary sensor.""" self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, f"{var}_{source}_{gw_dev.gw_id}", hass=gw_dev.hass + ENTITY_ID_FORMAT, + f"{description.key}_{source}_{gw_hub.hub_id}", + hass=gw_hub.hass, ) - self._gateway = gw_dev - self._var = var - self._source = source - self._attr_device_class = device_class - if TRANSLATE_SOURCE[source] is not None: - friendly_name_format = ( - f"{friendly_name_format} ({TRANSLATE_SOURCE[source]})" - ) - self._attr_name = friendly_name_format.format(gw_dev.name) - self._unsub_updates = None - self._attr_unique_id = f"{gw_dev.gw_id}-{source}-{var}" - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_dev.gw_id)}, - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - name=gw_dev.name, - sw_version=gw_dev.gw_version, - ) - - async def async_added_to_hass(self) -> None: - """Subscribe to updates from the component.""" - _LOGGER.debug("Added 
OpenTherm Gateway binary sensor %s", self._attr_name) - self._unsub_updates = async_dispatcher_connect( - self.hass, self._gateway.update_signal, self.receive_report - ) - - async def async_will_remove_from_hass(self) -> None: - """Unsubscribe from updates from the component.""" - _LOGGER.debug("Removing OpenTherm Gateway binary sensor %s", self._attr_name) - self._unsub_updates() + super().__init__(gw_hub, source, description) @callback - def receive_report(self, status): + def receive_report(self, status: dict[str, dict]) -> None: """Handle status updates from the component.""" self._attr_available = self._gateway.connected - state = status[self._source].get(self._var) + state = status[self._source].get(self.entity_description.key) self._attr_is_on = None if state is None else bool(state) self.async_write_ha_state() diff --git a/homeassistant/components/opentherm_gw/climate.py b/homeassistant/components/opentherm_gw/climate.py index 5eb1246e55f..bf295fb1fb7 100644 --- a/homeassistant/components/opentherm_gw/climate.py +++ b/homeassistant/components/opentherm_gw/climate.py @@ -87,13 +87,13 @@ class OpenThermClimate(ClimateEntity): _current_operation: HVACAction | None = None _enable_turn_on_off_backwards_compatibility = False - def __init__(self, gw_dev, options): + def __init__(self, gw_hub, options): """Initialize the device.""" - self._gateway = gw_dev + self._gateway = gw_hub self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, gw_dev.gw_id, hass=gw_dev.hass + ENTITY_ID_FORMAT, gw_hub.hub_id, hass=gw_hub.hass ) - self.friendly_name = gw_dev.name + self.friendly_name = gw_hub.name self._attr_name = self.friendly_name self.floor_temp = options.get(CONF_FLOOR_TEMP, DEFAULT_FLOOR_TEMP) self.temp_read_precision = options.get(CONF_READ_PRECISION) @@ -102,13 +102,13 @@ class OpenThermClimate(ClimateEntity): self._unsub_options = None self._unsub_updates = None self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_dev.gw_id)}, + identifiers={(DOMAIN, gw_hub.hub_id)}, manufacturer="Schelte Bron", model="OpenTherm Gateway", - name=gw_dev.name, - sw_version=gw_dev.gw_version, + name=gw_hub.name, + sw_version=gw_hub.gw_version, ) - self._attr_unique_id = gw_dev.gw_id + self._attr_unique_id = gw_hub.hub_id @callback def update_options(self, entry): diff --git a/homeassistant/components/opentherm_gw/config_flow.py b/homeassistant/components/opentherm_gw/config_flow.py index 19906689b57..c1d1caa2fb0 100644 --- a/homeassistant/components/opentherm_gw/config_flow.py +++ b/homeassistant/components/opentherm_gw/config_flow.py @@ -3,13 +3,19 @@ from __future__ import annotations import asyncio +from typing import Any import pyotgw from pyotgw import vars as gw_vars from serial import SerialException import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import ( CONF_DEVICE, CONF_ID, @@ -80,19 +86,21 @@ class OpenThermGwConfigFlow(ConfigFlow, domain=DOMAIN): return self._show_form() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle manual initiation of the config flow.""" return await self.async_step_init(user_input) - async def async_step_import(self, import_config): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import an OpenTherm Gateway device 
as a config entry. This flow is triggered by `async_setup` for configured devices. """ formatted_config = { - CONF_NAME: import_config.get(CONF_NAME, import_config[CONF_ID]), - CONF_DEVICE: import_config[CONF_DEVICE], - CONF_ID: import_config[CONF_ID], + CONF_NAME: import_data.get(CONF_NAME, import_data[CONF_ID]), + CONF_DEVICE: import_data[CONF_DEVICE], + CONF_ID: import_data[CONF_ID], } return await self.async_step_init(info=formatted_config) diff --git a/homeassistant/components/opentherm_gw/const.py b/homeassistant/components/opentherm_gw/const.py index 6b0a27aec92..c1932c7b2bd 100644 --- a/homeassistant/components/opentherm_gw/const.py +++ b/homeassistant/components/opentherm_gw/const.py @@ -1,20 +1,5 @@ """Constants for the opentherm_gw integration.""" -from __future__ import annotations - -import pyotgw.vars as gw_vars - -from homeassistant.components.binary_sensor import BinarySensorDeviceClass -from homeassistant.components.sensor import SensorDeviceClass -from homeassistant.const import ( - PERCENTAGE, - UnitOfPower, - UnitOfPressure, - UnitOfTemperature, - UnitOfTime, - UnitOfVolumeFlowRate, -) - ATTR_GW_ID = "gateway_id" ATTR_LEVEL = "level" ATTR_DHW_OVRD = "dhw_override" @@ -48,625 +33,3 @@ SERVICE_SET_MAX_MOD = "set_max_modulation" SERVICE_SET_OAT = "set_outside_temperature" SERVICE_SET_SB_TEMP = "set_setback_temperature" SERVICE_SEND_TRANSP_CMD = "send_transparent_command" - -TRANSLATE_SOURCE = { - gw_vars.BOILER: "Boiler", - gw_vars.OTGW: None, - gw_vars.THERMOSTAT: "Thermostat", -} - -SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1 - -BINARY_SENSOR_INFO: dict[str, list] = { - # [device_class, friendly_name format, [status source, ...]] - gw_vars.DATA_MASTER_CH_ENABLED: [ - None, - "Thermostat Central Heating {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_DHW_ENABLED: [ - None, - "Thermostat Hot Water {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_COOLING_ENABLED: [ - None, - "Thermostat Cooling {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_OTC_ENABLED: [ - None, - "Thermostat Outside Temperature Correction {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_CH2_ENABLED: [ - None, - "Thermostat Central Heating 2 {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_FAULT_IND: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Fault {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH_ACTIVE: [ - BinarySensorDeviceClass.HEAT, - "Boiler Central Heating {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_ACTIVE: [ - BinarySensorDeviceClass.HEAT, - "Boiler Hot Water {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_FLAME_ON: [ - BinarySensorDeviceClass.HEAT, - "Boiler Flame {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_COOLING_ACTIVE: [ - BinarySensorDeviceClass.COLD, - "Boiler Cooling {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH2_ACTIVE: [ - BinarySensorDeviceClass.HEAT, - "Boiler Central Heating 2 {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DIAG_IND: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Diagnostics {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_PRESENT: [ - None, - "Boiler Hot Water Present {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CONTROL_TYPE: [ - None, - "Boiler Control Type {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - 
gw_vars.DATA_SLAVE_COOLING_SUPPORTED: [ - None, - "Boiler Cooling Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_CONFIG: [ - None, - "Boiler Hot Water Configuration {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MASTER_LOW_OFF_PUMP: [ - None, - "Boiler Pump Commands Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH2_PRESENT: [ - None, - "Boiler Central Heating 2 Present {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_SERVICE_REQ: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Service Required {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_REMOTE_RESET: [ - None, - "Boiler Remote Reset Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_LOW_WATER_PRESS: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Low Water Pressure {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_GAS_FAULT: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Gas Fault {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_AIR_PRESS_FAULT: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Air Pressure Fault {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_WATER_OVERTEMP: [ - BinarySensorDeviceClass.PROBLEM, - "Boiler Water Overtemperature {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REMOTE_TRANSFER_DHW: [ - None, - "Remote Hot Water Setpoint Transfer Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REMOTE_TRANSFER_MAX_CH: [ - None, - "Remote Maximum Central Heating Setpoint Write Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REMOTE_RW_DHW: [ - None, - "Remote Hot Water Setpoint Write Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REMOTE_RW_MAX_CH: [ - None, - "Remote Central Heating Setpoint Write Support {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROVRD_MAN_PRIO: [ - None, - "Remote Override Manual Change Priority {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROVRD_AUTO_PRIO: [ - None, - "Remote Override Program Change Priority {}", - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.OTGW_GPIO_A_STATE: [None, "Gateway GPIO A {}", [gw_vars.OTGW]], - gw_vars.OTGW_GPIO_B_STATE: [None, "Gateway GPIO B {}", [gw_vars.OTGW]], - gw_vars.OTGW_IGNORE_TRANSITIONS: [ - None, - "Gateway Ignore Transitions {}", - [gw_vars.OTGW], - ], - gw_vars.OTGW_OVRD_HB: [None, "Gateway Override High Byte {}", [gw_vars.OTGW]], -} - -SENSOR_INFO: dict[str, list] = { - # [device_class, unit, friendly_name, suggested_display_precision, [status source, ...]] - gw_vars.DATA_CONTROL_SETPOINT: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Control Setpoint {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_MEMBERID: [ - None, - None, - "Thermostat Member ID {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MEMBERID: [ - None, - None, - "Boiler Member ID {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_OEM_FAULT: [ - None, - None, - "Boiler OEM Fault Code {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_COOLING_CONTROL: [ - None, - PERCENTAGE, - "Cooling Control Signal {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CONTROL_SETPOINT_2: [ - SensorDeviceClass.TEMPERATURE, - 
UnitOfTemperature.CELSIUS, - "Control Setpoint 2 {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROOM_SETPOINT_OVRD: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Room Setpoint Override {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD: [ - None, - PERCENTAGE, - "Boiler Maximum Relative Modulation {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MAX_CAPACITY: [ - SensorDeviceClass.POWER, - UnitOfPower.KILO_WATT, - "Boiler Maximum Capacity {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_MIN_MOD_LEVEL: [ - None, - PERCENTAGE, - "Boiler Minimum Modulation Level {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROOM_SETPOINT: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Room Setpoint {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_REL_MOD_LEVEL: [ - None, - PERCENTAGE, - "Relative Modulation Level {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_WATER_PRESS: [ - SensorDeviceClass.PRESSURE, - UnitOfPressure.BAR, - "Central Heating Water Pressure {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_FLOW_RATE: [ - SensorDeviceClass.VOLUME_FLOW_RATE, - UnitOfVolumeFlowRate.LITERS_PER_MINUTE, - "Hot Water Flow Rate {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROOM_SETPOINT_2: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Room Setpoint 2 {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_ROOM_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Room Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_WATER_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Central Heating Water Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_OUTSIDE_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Outside Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_RETURN_WATER_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Return Water Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SOLAR_STORAGE_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Solar Storage Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SOLAR_COLL_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Solar Collector Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_WATER_TEMP_2: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Central Heating 2 Water 
Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_TEMP_2: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water 2 Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_EXHAUST_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Exhaust Temperature {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_MAX_SETP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water Maximum Setpoint {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_DHW_MIN_SETP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water Minimum Setpoint {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH_MAX_SETP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Boiler Maximum Central Heating Setpoint {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_CH_MIN_SETP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Boiler Minimum Central Heating Setpoint {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_SETPOINT: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Hot Water Setpoint {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MAX_CH_SETPOINT: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Maximum Central Heating Setpoint {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_OEM_DIAG: [ - None, - None, - "OEM Diagnostic Code {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_TOTAL_BURNER_STARTS: [ - None, - "starts", - "Total Burner Starts {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_PUMP_STARTS: [ - None, - "starts", - "Central Heating Pump Starts {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_PUMP_STARTS: [ - None, - "starts", - "Hot Water Pump Starts {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_BURNER_STARTS: [ - None, - "starts", - "Hot Water Burner Starts {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_TOTAL_BURNER_HOURS: [ - SensorDeviceClass.DURATION, - UnitOfTime.HOURS, - "Total Burner Hours {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_CH_PUMP_HOURS: [ - SensorDeviceClass.DURATION, - UnitOfTime.HOURS, - "Central Heating Pump Hours {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_PUMP_HOURS: [ - SensorDeviceClass.DURATION, - UnitOfTime.HOURS, - "Hot Water Pump Hours {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_DHW_BURNER_HOURS: [ - SensorDeviceClass.DURATION, - UnitOfTime.HOURS, - "Hot Water Burner Hours {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_OT_VERSION: [ - None, - None, - "Thermostat OpenTherm Version {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_OT_VERSION: [ - None, - None, - "Boiler OpenTherm Version {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_MASTER_PRODUCT_TYPE: [ - None, - None, - "Thermostat Product Type {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - 
gw_vars.DATA_MASTER_PRODUCT_VERSION: [ - None, - None, - "Thermostat Product Version {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_PRODUCT_TYPE: [ - None, - None, - "Boiler Product Type {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.DATA_SLAVE_PRODUCT_VERSION: [ - None, - None, - "Boiler Product Version {}", - None, - [gw_vars.BOILER, gw_vars.THERMOSTAT], - ], - gw_vars.OTGW_MODE: [ - None, - None, - "Gateway/Monitor Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_DHW_OVRD: [ - None, - None, - "Gateway Hot Water Override Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_ABOUT: [ - None, - None, - "Gateway Firmware Version {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_BUILD: [ - None, - None, - "Gateway Firmware Build {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_CLOCKMHZ: [ - None, - None, - "Gateway Clock Speed {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_A: [ - None, - None, - "Gateway LED A Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_B: [ - None, - None, - "Gateway LED B Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_C: [ - None, - None, - "Gateway LED C Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_D: [ - None, - None, - "Gateway LED D Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_E: [ - None, - None, - "Gateway LED E Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_LED_F: [ - None, - None, - "Gateway LED F Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_GPIO_A: [ - None, - None, - "Gateway GPIO A Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_GPIO_B: [ - None, - None, - "Gateway GPIO B Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_SB_TEMP: [ - SensorDeviceClass.TEMPERATURE, - UnitOfTemperature.CELSIUS, - "Gateway Setback Temperature {}", - SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, - [gw_vars.OTGW], - ], - gw_vars.OTGW_SETP_OVRD_MODE: [ - None, - None, - "Gateway Room Setpoint Override Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_SMART_PWR: [ - None, - None, - "Gateway Smart Power Mode {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_THRM_DETECT: [ - None, - None, - "Gateway Thermostat Detection {}", - None, - [gw_vars.OTGW], - ], - gw_vars.OTGW_VREF: [ - None, - None, - "Gateway Reference Voltage Setting {}", - None, - [gw_vars.OTGW], - ], -} diff --git a/homeassistant/components/opentherm_gw/entity.py b/homeassistant/components/opentherm_gw/entity.py new file mode 100644 index 00000000000..a1035b946c2 --- /dev/null +++ b/homeassistant/components/opentherm_gw/entity.py @@ -0,0 +1,76 @@ +"""Common opentherm_gw entity properties.""" + +import logging + +import pyotgw.vars as gw_vars + +from homeassistant.core import callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entity import Entity, EntityDescription + +from . 
import OpenThermGatewayHub
+from .const import DOMAIN
+
+_LOGGER = logging.getLogger(__name__)
+
+TRANSLATE_SOURCE = {
+    gw_vars.BOILER: "Boiler",
+    gw_vars.OTGW: None,
+    gw_vars.THERMOSTAT: "Thermostat",
+}
+
+
+class OpenThermEntityDescription(EntityDescription):
+    """Describe common opentherm_gw entity properties."""
+
+    friendly_name_format: str
+
+
+class OpenThermEntity(Entity):
+    """Represent an OpenTherm Gateway entity."""
+
+    _attr_should_poll = False
+    _attr_entity_registry_enabled_default = False
+    _attr_available = False
+    entity_description: OpenThermEntityDescription
+
+    def __init__(
+        self,
+        gw_hub: OpenThermGatewayHub,
+        source: str,
+        description: OpenThermEntityDescription,
+    ) -> None:
+        """Initialize the entity."""
+        self.entity_description = description
+        self._gateway = gw_hub
+        self._source = source
+        friendly_name_format = (
+            f"{description.friendly_name_format} ({TRANSLATE_SOURCE[source]})"
+            if TRANSLATE_SOURCE[source] is not None
+            else description.friendly_name_format
+        )
+        self._attr_name = friendly_name_format.format(gw_hub.name)
+        self._attr_unique_id = f"{gw_hub.hub_id}-{source}-{description.key}"
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, gw_hub.hub_id)},
+            manufacturer="Schelte Bron",
+            model="OpenTherm Gateway",
+            name=gw_hub.name,
+            sw_version=gw_hub.gw_version,
+        )
+
+    async def async_added_to_hass(self) -> None:
+        """Subscribe to updates from the component."""
+        _LOGGER.debug("Added OpenTherm Gateway entity %s", self._attr_name)
+        self.async_on_remove(
+            async_dispatcher_connect(
+                self.hass, self._gateway.update_signal, self.receive_report
+            )
+        )
+
+    @callback
+    def receive_report(self, status: dict[str, dict]) -> None:
+        """Handle status updates from the component."""
+        # Must be implemented at the platform level.
+        raise NotImplementedError
diff --git a/homeassistant/components/opentherm_gw/icons.json b/homeassistant/components/opentherm_gw/icons.json
index 13dbe0a70a1..37942aa0e63 100644
--- a/homeassistant/components/opentherm_gw/icons.json
+++ b/homeassistant/components/opentherm_gw/icons.json
@@ -1,16 +1,40 @@
 {
   "services": {
-    "reset_gateway": "mdi:reload",
-    "set_central_heating_ovrd": "mdi:heat-wave",
-    "set_clock": "mdi:clock",
-    "set_control_setpoint": "mdi:thermometer-lines",
-    "set_hot_water_ovrd": "mdi:thermometer-lines",
-    "set_hot_water_setpoint": "mdi:thermometer-lines",
-    "set_gpio_mode": "mdi:cable-data",
-    "set_led_mode": "mdi:led-on",
-    "set_max_modulation": "mdi:thermometer-lines",
-    "set_outside_temperature": "mdi:thermometer-lines",
-    "set_setback_temperature": "mdi:thermometer-lines",
-    "send_transparent_command": "mdi:console"
+    "reset_gateway": {
+      "service": "mdi:reload"
+    },
+    "set_central_heating_ovrd": {
+      "service": "mdi:heat-wave"
+    },
+    "set_clock": {
+      "service": "mdi:clock"
+    },
+    "set_control_setpoint": {
+      "service": "mdi:thermometer-lines"
+    },
+    "set_hot_water_ovrd": {
+      "service": "mdi:thermometer-lines"
+    },
+    "set_hot_water_setpoint": {
+      "service": "mdi:thermometer-lines"
+    },
+    "set_gpio_mode": {
+      "service": "mdi:cable-data"
+    },
+    "set_led_mode": {
+      "service": "mdi:led-on"
+    },
+    "set_max_modulation": {
+      "service": "mdi:thermometer-lines"
+    },
+    "set_outside_temperature": {
+      "service": "mdi:thermometer-lines"
+    },
+    "set_setback_temperature": {
+      "service": "mdi:thermometer-lines"
+    },
+    "send_transparent_command": {
+      "service": "mdi:console"
+    }
   }
 }
diff --git a/homeassistant/components/opentherm_gw/sensor.py b/homeassistant/components/opentherm_gw/sensor.py
index 8c17aca4516..fb30b2ce35c 100644
--- a/homeassistant/components/opentherm_gw/sensor.py
+++ b/homeassistant/components/opentherm_gw/sensor.py
@@ -1,20 +1,621 @@
 """Support for OpenTherm Gateway sensors."""
 
-import logging
+from dataclasses import dataclass
 
-from homeassistant.components.sensor import ENTITY_ID_FORMAT, SensorEntity
+import pyotgw.vars as gw_vars
+
+from homeassistant.components.sensor import (
+    ENTITY_ID_FORMAT,
+    SensorDeviceClass,
+    SensorEntity,
+    SensorEntityDescription,
+    SensorStateClass,
+)
 from homeassistant.config_entries import ConfigEntry
-from homeassistant.const import CONF_ID
+from homeassistant.const import (
+    CONF_ID,
+    PERCENTAGE,
+    UnitOfPower,
+    UnitOfPressure,
+    UnitOfTemperature,
+    UnitOfTime,
+    UnitOfVolumeFlowRate,
+)
 from homeassistant.core import HomeAssistant, callback
-from homeassistant.helpers.device_registry import DeviceInfo
-from homeassistant.helpers.dispatcher import async_dispatcher_connect
 from homeassistant.helpers.entity import async_generate_entity_id
 from homeassistant.helpers.entity_platform import AddEntitiesCallback
 
-from . import DOMAIN
-from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW, SENSOR_INFO, TRANSLATE_SOURCE
+from . import OpenThermGatewayHub
+from .const import DATA_GATEWAYS, DATA_OPENTHERM_GW
+from .entity import OpenThermEntity, OpenThermEntityDescription
 
-_LOGGER = logging.getLogger(__name__)
+SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION = 1
+
+
+@dataclass(frozen=True, kw_only=True)
+class OpenThermSensorEntityDescription(
+    SensorEntityDescription, OpenThermEntityDescription
+):
+    """Describes opentherm_gw sensor entity."""
+
+
+SENSOR_INFO: tuple[tuple[list[str], OpenThermSensorEntityDescription], ...]
= ( + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT, + friendly_name_format="Control Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_MEMBERID, + friendly_name_format="Thermostat Member ID {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MEMBERID, + friendly_name_format="Boiler Member ID {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OEM_FAULT, + friendly_name_format="Boiler OEM Fault Code {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_COOLING_CONTROL, + friendly_name_format="Cooling Control Signal {}", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CONTROL_SETPOINT_2, + friendly_name_format="Control Setpoint 2 {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_OVRD, + friendly_name_format="Room Setpoint Override {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_RELATIVE_MOD, + friendly_name_format="Boiler Maximum Relative Modulation {}", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MAX_CAPACITY, + friendly_name_format="Boiler Maximum Capacity {}", + state_class=SensorStateClass.MEASUREMENT, + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.KILO_WATT, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_MIN_MOD_LEVEL, + friendly_name_format="Boiler Minimum Modulation Level {}", + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT, + friendly_name_format="Room Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_REL_MOD_LEVEL, + friendly_name_format="Relative Modulation Level {}", + 
state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=PERCENTAGE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_PRESS, + friendly_name_format="Central Heating Water Pressure {}", + device_class=SensorDeviceClass.PRESSURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfPressure.BAR, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_FLOW_RATE, + friendly_name_format="Hot Water Flow Rate {}", + device_class=SensorDeviceClass.VOLUME_FLOW_RATE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_SETPOINT_2, + friendly_name_format="Room Setpoint 2 {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_ROOM_TEMP, + friendly_name_format="Room Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP, + friendly_name_format="Central Heating Water Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP, + friendly_name_format="Hot Water Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OUTSIDE_TEMP, + friendly_name_format="Outside Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_RETURN_WATER_TEMP, + friendly_name_format="Return Water Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_STORAGE_TEMP, + friendly_name_format="Solar Storage Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + 
state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SOLAR_COLL_TEMP, + friendly_name_format="Solar Collector Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_WATER_TEMP_2, + friendly_name_format="Central Heating 2 Water Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_TEMP_2, + friendly_name_format="Hot Water 2 Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_EXHAUST_TEMP, + friendly_name_format="Exhaust Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MAX_SETP, + friendly_name_format="Hot Water Maximum Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_DHW_MIN_SETP, + friendly_name_format="Hot Water Minimum Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MAX_SETP, + friendly_name_format="Boiler Maximum Central Heating Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_CH_MIN_SETP, + friendly_name_format="Boiler Minimum Central Heating Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_SETPOINT, + friendly_name_format="Hot Water Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MAX_CH_SETPOINT, + 
friendly_name_format="Maximum Central Heating Setpoint {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_OEM_DIAG, + friendly_name_format="OEM Diagnostic Code {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_STARTS, + friendly_name_format="Total Burner Starts {}", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_STARTS, + friendly_name_format="Central Heating Pump Starts {}", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_STARTS, + friendly_name_format="Hot Water Pump Starts {}", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_STARTS, + friendly_name_format="Hot Water Burner Starts {}", + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement="starts", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_TOTAL_BURNER_HOURS, + friendly_name_format="Total Burner Hours {}", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_CH_PUMP_HOURS, + friendly_name_format="Central Heating Pump Hours {}", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_PUMP_HOURS, + friendly_name_format="Hot Water Pump Hours {}", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_DHW_BURNER_HOURS, + friendly_name_format="Hot Water Burner Hours {}", + device_class=SensorDeviceClass.DURATION, + state_class=SensorStateClass.TOTAL, + native_unit_of_measurement=UnitOfTime.HOURS, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_OT_VERSION, + friendly_name_format="Thermostat OpenTherm Version {}", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_OT_VERSION, + friendly_name_format="Boiler OpenTherm Version {}", + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_TYPE, + friendly_name_format="Thermostat Product Type {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_MASTER_PRODUCT_VERSION, + 
friendly_name_format="Thermostat Product Version {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_TYPE, + friendly_name_format="Boiler Product Type {}", + ), + ), + ( + [gw_vars.BOILER, gw_vars.THERMOSTAT], + OpenThermSensorEntityDescription( + key=gw_vars.DATA_SLAVE_PRODUCT_VERSION, + friendly_name_format="Boiler Product Version {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_MODE, + friendly_name_format="Gateway/Monitor Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_DHW_OVRD, + friendly_name_format="Gateway Hot Water Override Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_ABOUT, + friendly_name_format="Gateway Firmware Version {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_BUILD, + friendly_name_format="Gateway Firmware Build {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_CLOCKMHZ, + friendly_name_format="Gateway Clock Speed {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_A, + friendly_name_format="Gateway LED A Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_B, + friendly_name_format="Gateway LED B Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_C, + friendly_name_format="Gateway LED C Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_D, + friendly_name_format="Gateway LED D Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_E, + friendly_name_format="Gateway LED E Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_LED_F, + friendly_name_format="Gateway LED F Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_GPIO_A, + friendly_name_format="Gateway GPIO A Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_GPIO_B, + friendly_name_format="Gateway GPIO B Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SB_TEMP, + friendly_name_format="Gateway Setback Temperature {}", + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + suggested_display_precision=SENSOR_FLOAT_SUGGESTED_DISPLAY_PRECISION, + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SETP_OVRD_MODE, + friendly_name_format="Gateway Room Setpoint Override Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_SMART_PWR, + friendly_name_format="Gateway Smart Power Mode {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_THRM_DETECT, + friendly_name_format="Gateway Thermostat Detection {}", + ), + ), + ( + [gw_vars.OTGW], + OpenThermSensorEntityDescription( + key=gw_vars.OTGW_VREF, + friendly_name_format="Gateway Reference Voltage Setting {}", + ), + ), +) async def async_setup_entry( @@ -23,82 +624,42 @@ async def async_setup_entry( async_add_entities: AddEntitiesCallback, ) -> None: """Set up the OpenTherm Gateway sensors.""" - gw_dev = 
hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] + gw_hub = hass.data[DATA_OPENTHERM_GW][DATA_GATEWAYS][config_entry.data[CONF_ID]] async_add_entities( OpenThermSensor( - gw_dev, - var, + gw_hub, source, - info[0], - info[1], - info[2], - info[3], + description, ) - for var, info in SENSOR_INFO.items() - for source in info[4] + for sources, description in SENSOR_INFO + for source in sources ) -class OpenThermSensor(SensorEntity): +class OpenThermSensor(OpenThermEntity, SensorEntity): """Representation of an OpenTherm Gateway sensor.""" - _attr_should_poll = False - _attr_entity_registry_enabled_default = False - _attr_available = False + entity_description: OpenThermSensorEntityDescription def __init__( self, - gw_dev, - var, - source, - device_class, - unit, - friendly_name_format, - suggested_display_precision, - ): + gw_hub: OpenThermGatewayHub, + source: str, + description: OpenThermSensorEntityDescription, + ) -> None: """Initialize the OpenTherm Gateway sensor.""" self.entity_id = async_generate_entity_id( - ENTITY_ID_FORMAT, f"{var}_{source}_{gw_dev.gw_id}", hass=gw_dev.hass + ENTITY_ID_FORMAT, + f"{description.key}_{source}_{gw_hub.hub_id}", + hass=gw_hub.hass, ) - self._gateway = gw_dev - self._var = var - self._source = source - self._attr_device_class = device_class - self._attr_native_unit_of_measurement = unit - if TRANSLATE_SOURCE[source] is not None: - friendly_name_format = ( - f"{friendly_name_format} ({TRANSLATE_SOURCE[source]})" - ) - self._attr_name = friendly_name_format.format(gw_dev.name) - self._unsub_updates = None - self._attr_unique_id = f"{gw_dev.gw_id}-{source}-{var}" - if suggested_display_precision: - self._attr_suggested_display_precision = suggested_display_precision - self._attr_device_info = DeviceInfo( - identifiers={(DOMAIN, gw_dev.gw_id)}, - manufacturer="Schelte Bron", - model="OpenTherm Gateway", - name=gw_dev.name, - sw_version=gw_dev.gw_version, - ) - - async def async_added_to_hass(self) -> None: - """Subscribe to updates from the component.""" - _LOGGER.debug("Added OpenTherm Gateway sensor %s", self._attr_name) - self._unsub_updates = async_dispatcher_connect( - self.hass, self._gateway.update_signal, self.receive_report - ) - - async def async_will_remove_from_hass(self) -> None: - """Unsubscribe from updates from the component.""" - _LOGGER.debug("Removing OpenTherm Gateway sensor %s", self._attr_name) - self._unsub_updates() + super().__init__(gw_hub, source, description) @callback - def receive_report(self, status): + def receive_report(self, status: dict[str, dict]) -> None: """Handle status updates from the component.""" self._attr_available = self._gateway.connected - value = status[self._source].get(self._var) + value = status[self._source].get(self.entity_description.key) self._attr_native_value = value self.async_write_ha_state() diff --git a/homeassistant/components/opower/coordinator.py b/homeassistant/components/opower/coordinator.py index d0795ae4e15..3249cf1a375 100644 --- a/homeassistant/components/opower/coordinator.py +++ b/homeassistant/components/opower/coordinator.py @@ -98,7 +98,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): account.meter_type.name.lower(), # Some utilities like AEP have "-" in their account id. 
# Replace it with "_" to avoid "Invalid statistic_id" - account.utility_account_id.replace("-", "_"), + account.utility_account_id.replace("-", "_").lower(), ) ) cost_statistic_id = f"{DOMAIN}:{id_prefix}_energy_cost" @@ -110,7 +110,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): ) last_stat = await get_instance(self.hass).async_add_executor_job( - get_last_statistics, self.hass, 1, cost_statistic_id, True, set() + get_last_statistics, self.hass, 1, consumption_statistic_id, True, set() ) if not last_stat: _LOGGER.debug("Updating statistic for the first time") @@ -124,7 +124,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): cost_reads = await self._async_get_cost_reads( account, self.api.utility.timezone(), - last_stat[cost_statistic_id][0]["start"], + last_stat[consumption_statistic_id][0]["start"], ) if not cost_reads: _LOGGER.debug("No recent usage/cost data. Skipping update") @@ -141,7 +141,7 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): ) cost_sum = cast(float, stats[cost_statistic_id][0]["sum"]) consumption_sum = cast(float, stats[consumption_statistic_id][0]["sum"]) - last_stats_time = stats[cost_statistic_id][0]["start"] + last_stats_time = stats[consumption_statistic_id][0]["start"] cost_statistics = [] consumption_statistics = [] @@ -187,7 +187,17 @@ class OpowerCoordinator(DataUpdateCoordinator[dict[str, Forecast]]): else UnitOfVolume.CENTUM_CUBIC_FEET, ) + _LOGGER.debug( + "Adding %s statistics for %s", + len(cost_statistics), + cost_statistic_id, + ) async_add_external_statistics(self.hass, cost_metadata, cost_statistics) + _LOGGER.debug( + "Adding %s statistics for %s", + len(consumption_statistics), + consumption_statistic_id, + ) async_add_external_statistics( self.hass, consumption_metadata, consumption_statistics ) diff --git a/homeassistant/components/opower/manifest.json b/homeassistant/components/opower/manifest.json index b869356cdf9..02b98cfaf00 100644 --- a/homeassistant/components/opower/manifest.json +++ b/homeassistant/components/opower/manifest.json @@ -7,5 +7,5 @@ "documentation": "https://www.home-assistant.io/integrations/opower", "iot_class": "cloud_polling", "loggers": ["opower"], - "requirements": ["opower==0.6.0"] + "requirements": ["opower==0.7.0"] } diff --git a/homeassistant/components/osoenergy/config_flow.py b/homeassistant/components/osoenergy/config_flow.py index e0afc5292ae..0642250e9ed 100644 --- a/homeassistant/components/osoenergy/config_flow.py +++ b/homeassistant/components/osoenergy/config_flow.py @@ -69,9 +69,9 @@ class OSOEnergyFlowHandler(ConfigFlow, domain=DOMAIN): return None async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Re Authenticate a user.""" self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) - data = {CONF_API_KEY: user_input[CONF_API_KEY]} + data = {CONF_API_KEY: entry_data[CONF_API_KEY]} return await self.async_step_user(data) diff --git a/homeassistant/components/otbr/__init__.py b/homeassistant/components/otbr/__init__.py index 97c2f40eb99..4b95be1d40d 100644 --- a/homeassistant/components/otbr/__init__.py +++ b/homeassistant/components/otbr/__init__.py @@ -2,6 +2,8 @@ from __future__ import annotations +import logging + import aiohttp import python_otbr_api @@ -15,21 +17,27 @@ from homeassistant.helpers.typing import ConfigType from . 
import websocket_api from .const import DOMAIN -from .util import OTBRData, update_issues +from .util import ( + GetBorderAgentIdNotSupported, + OTBRData, + update_issues, + update_unique_id, +) + +_LOGGER = logging.getLogger(__name__) CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) +type OTBRConfigEntry = ConfigEntry[OTBRData] + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the Open Thread Border Router component.""" websocket_api.async_setup(hass) - if len(config_entries := hass.config_entries.async_entries(DOMAIN)): - for config_entry in config_entries[1:]: - await hass.config_entries.async_remove(config_entry.entry_id) return True -async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_setup_entry(hass: HomeAssistant, entry: OTBRConfigEntry) -> bool: """Set up an Open Thread Border Router config entry.""" api = python_otbr_api.OTBR(entry.data["url"], async_get_clientsession(hass), 10) @@ -38,13 +46,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: border_agent_id = await otbrdata.get_border_agent_id() dataset_tlvs = await otbrdata.get_active_dataset_tlvs() extended_address = await otbrdata.get_extended_address() - except ( - HomeAssistantError, - aiohttp.ClientError, - TimeoutError, - ) as err: - raise ConfigEntryNotReady("Unable to connect") from err - if border_agent_id is None: + except GetBorderAgentIdNotSupported: ir.async_create_issue( hass, DOMAIN, @@ -55,6 +57,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: translation_key="get_get_border_agent_id_unsupported", ) return False + except ( + HomeAssistantError, + aiohttp.ClientError, + TimeoutError, + ) as err: + raise ConfigEntryNotReady("Unable to connect") from err + await update_unique_id(hass, entry, border_agent_id) if dataset_tlvs: await update_issues(hass, otbrdata, dataset_tlvs) await async_add_dataset( @@ -66,31 +75,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: ) entry.async_on_unload(entry.add_update_listener(async_reload_entry)) - - hass.data[DOMAIN] = otbrdata + entry.runtime_data = otbrdata return True -async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: +async def async_unload_entry(hass: HomeAssistant, entry: OTBRConfigEntry) -> bool: """Unload a config entry.""" - hass.data.pop(DOMAIN) return True -async def async_reload_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: +async def async_reload_entry(hass: HomeAssistant, entry: OTBRConfigEntry) -> None: """Handle an options update.""" await hass.config_entries.async_reload(entry.entry_id) - - -async def async_get_active_dataset_tlvs(hass: HomeAssistant) -> bytes | None: - """Get current active operational dataset in TLVS format, or None. - - Returns None if there is no active operational dataset. - Raises if the http status is 400 or higher or if the response is invalid. 
- """ - if DOMAIN not in hass.data: - raise HomeAssistantError("OTBR API not available") - - data: OTBRData = hass.data[DOMAIN] - return await data.get_active_dataset_tlvs() diff --git a/homeassistant/components/otbr/config_flow.py b/homeassistant/components/otbr/config_flow.py index 8342a965bd3..c1747981b07 100644 --- a/homeassistant/components/otbr/config_flow.py +++ b/homeassistant/components/otbr/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from contextlib import suppress import logging -from typing import cast +from typing import TYPE_CHECKING, cast import aiohttp import python_otbr_api @@ -33,9 +33,16 @@ from .util import ( get_allowed_channel, ) +if TYPE_CHECKING: + from . import OTBRConfigEntry + _LOGGER = logging.getLogger(__name__) +class AlreadyConfigured(HomeAssistantError): + """Raised when the router is already configured.""" + + def _is_yellow(hass: HomeAssistant) -> bool: """Return True if Home Assistant is running on a Home Assistant Yellow.""" try: @@ -70,9 +77,8 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def _connect_and_set_dataset(self, otbr_url: str) -> None: + async def _set_dataset(self, api: python_otbr_api.OTBR, otbr_url: str) -> None: """Connect to the OTBR and create or apply a dataset if it doesn't have one.""" - api = python_otbr_api.OTBR(otbr_url, async_get_clientsession(self.hass), 10) if await api.get_active_dataset_tlvs() is None: allowed_channel = await get_allowed_channel(self.hass, otbr_url) @@ -89,7 +95,9 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): await api.set_active_dataset_tlvs(bytes.fromhex(thread_dataset_tlv)) else: _LOGGER.debug( - "not importing TLV with channel %s", thread_dataset_channel + "not importing TLV with channel %s for %s", + thread_dataset_channel, + otbr_url, ) pan_id = generate_random_pan_id() await api.create_active_dataset( @@ -101,30 +109,68 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): ) await api.set_enabled(True) + async def _is_border_agent_id_configured(self, border_agent_id: bytes) -> bool: + """Return True if another config entry's OTBR has the same border agent id.""" + config_entry: OTBRConfigEntry + for config_entry in self.hass.config_entries.async_loaded_entries(DOMAIN): + data = config_entry.runtime_data + try: + other_border_agent_id = await data.get_border_agent_id() + except HomeAssistantError: + _LOGGER.debug( + "Could not read border agent id from %s", data.url, exc_info=True + ) + continue + _LOGGER.debug( + "border agent id for existing url %s: %s", + data.url, + other_border_agent_id.hex(), + ) + if border_agent_id == other_border_agent_id: + return True + return False + + async def _connect_and_configure_router(self, otbr_url: str) -> bytes: + """Connect to the router and configure it if needed. + + Will raise if the router's border agent id is in use by another config entry. + Returns the router's border agent id. 
+ """ + api = python_otbr_api.OTBR(otbr_url, async_get_clientsession(self.hass), 10) + border_agent_id = await api.get_border_agent_id() + _LOGGER.debug("border agent id for url %s: %s", otbr_url, border_agent_id.hex()) + + if await self._is_border_agent_id_configured(border_agent_id): + raise AlreadyConfigured + + await self._set_dataset(api, otbr_url) + + return border_agent_id + async def async_step_user( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: """Set up by user.""" - if self._async_current_entries(): - return self.async_abort(reason="single_instance_allowed") - errors = {} if user_input is not None: - url = user_input[CONF_URL] + url = user_input[CONF_URL].rstrip("/") try: - await self._connect_and_set_dataset(url) + border_agent_id = await self._connect_and_configure_router(url) + except AlreadyConfigured: + errors["base"] = "already_configured" except ( python_otbr_api.OTBRError, aiohttp.ClientError, TimeoutError, - ): + ) as exc: + _LOGGER.debug("Failed to communicate with OTBR@%s: %s", url, exc) errors["base"] = "cannot_connect" else: - await self.async_set_unique_id(DOMAIN) + await self.async_set_unique_id(border_agent_id.hex()) return self.async_create_entry( title="Open Thread Border Router", - data=user_input, + data={CONF_URL: url}, ) data_schema = vol.Schema({CONF_URL: str}) @@ -140,35 +186,40 @@ class OTBRConfigFlow(ConfigFlow, domain=DOMAIN): url = f"http://{config['host']}:{config['port']}" config_entry_data = {"url": url} - if self._async_in_progress(include_uninitialized=True): - # We currently don't handle multiple config entries, abort if hassio - # discovers multiple addons with otbr support - return self.async_abort(reason="single_instance_allowed") - if current_entries := self._async_current_entries(): for current_entry in current_entries: if current_entry.source != SOURCE_HASSIO: continue current_url = yarl.URL(current_entry.data["url"]) - if ( + if not (unique_id := current_entry.unique_id): # The first version did not set a unique_id # so if the entry does not have a unique_id # we have to assume it's the first version - current_entry.unique_id - and (current_entry.unique_id != discovery_info.uuid) + # This check can be removed in HA Core 2025.9 + unique_id = discovery_info.uuid + if ( + unique_id != discovery_info.uuid or current_url.host != config["host"] or current_url.port == config["port"] ): continue # Update URL with the new port self.hass.config_entries.async_update_entry( - current_entry, data=config_entry_data + current_entry, + data=config_entry_data, + unique_id=unique_id, # Remove in HA Core 2025.9 ) - return self.async_abort(reason="single_instance_allowed") + return self.async_abort(reason="already_configured") try: - await self._connect_and_set_dataset(url) - except python_otbr_api.OTBRError as exc: + await self._connect_and_configure_router(url) + except AlreadyConfigured: + return self.async_abort(reason="already_configured") + except ( + python_otbr_api.OTBRError, + aiohttp.ClientError, + TimeoutError, + ) as exc: _LOGGER.warning("Failed to communicate with OTBR@%s: %s", url, exc) return self.async_abort(reason="unknown") diff --git a/homeassistant/components/otbr/const.py b/homeassistant/components/otbr/const.py index cc3e4a9e6c3..c38b3cc1250 100644 --- a/homeassistant/components/otbr/const.py +++ b/homeassistant/components/otbr/const.py @@ -1,5 +1,7 @@ """Constants for the Open Thread Border Router integration.""" +from __future__ import annotations + DOMAIN = "otbr" DEFAULT_CHANNEL = 15 diff --git 
a/homeassistant/components/otbr/silabs_multiprotocol.py b/homeassistant/components/otbr/silabs_multiprotocol.py index bd7eb997558..d97e6811e6d 100644 --- a/homeassistant/components/otbr/silabs_multiprotocol.py +++ b/homeassistant/components/otbr/silabs_multiprotocol.py @@ -2,7 +2,10 @@ from __future__ import annotations +from collections.abc import Callable, Coroutine +from functools import wraps import logging +from typing import TYPE_CHECKING, Any, Concatenate import aiohttp from python_otbr_api import tlv_parser @@ -15,21 +18,60 @@ from homeassistant.components.thread import async_add_dataset from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from . import DOMAIN +from .const import DOMAIN from .util import OTBRData +if TYPE_CHECKING: + from . import OTBRConfigEntry + _LOGGER = logging.getLogger(__name__) -async def async_change_channel(hass: HomeAssistant, channel: int, delay: float) -> None: +def async_get_otbr_data[**_P, _R, _R_Def]( + retval: _R_Def, +) -> Callable[ + [Callable[Concatenate[HomeAssistant, OTBRData, _P], Coroutine[Any, Any, _R]]], + Callable[Concatenate[HomeAssistant, _P], Coroutine[Any, Any, _R | _R_Def]], +]: + """Decorate function to get OTBR data.""" + + def _async_get_otbr_data( + orig_func: Callable[ + Concatenate[HomeAssistant, OTBRData, _P], + Coroutine[Any, Any, _R], + ], + ) -> Callable[Concatenate[HomeAssistant, _P], Coroutine[Any, Any, _R | _R_Def]]: + """Decorate function to get OTBR data.""" + + @wraps(orig_func) + async def async_get_otbr_data_wrapper( + hass: HomeAssistant, *args: _P.args, **kwargs: _P.kwargs + ) -> _R | _R_Def: + """Fetch OTBR data and pass to orig_func.""" + config_entry: OTBRConfigEntry + for config_entry in hass.config_entries.async_loaded_entries(DOMAIN): + data = config_entry.runtime_data + if is_multiprotocol_url(data.url): + return await orig_func(hass, data, *args, **kwargs) + + return retval + + return async_get_otbr_data_wrapper + + return _async_get_otbr_data + + +@async_get_otbr_data(None) +async def async_change_channel( + hass: HomeAssistant, + data: OTBRData, + channel: int, + delay: float, +) -> None: """Set the channel to be used. Does nothing if not configured. """ - if DOMAIN not in hass.data: - return - - data: OTBRData = hass.data[DOMAIN] await data.set_channel(channel, delay) # Import the new dataset @@ -48,16 +90,12 @@ async def async_change_channel(hass: HomeAssistant, channel: int, delay: float) await async_add_dataset(hass, DOMAIN, dataset_tlvs_str) -async def async_get_channel(hass: HomeAssistant) -> int | None: +@async_get_otbr_data(None) +async def async_get_channel(hass: HomeAssistant, data: OTBRData) -> int | None: """Return the channel. Returns None if not configured. """ - if DOMAIN not in hass.data: - return None - - data: OTBRData = hass.data[DOMAIN] - try: dataset = await data.get_active_dataset() except ( @@ -74,13 +112,10 @@ async def async_get_channel(hass: HomeAssistant) -> int | None: return dataset.channel -async def async_using_multipan(hass: HomeAssistant) -> bool: +@async_get_otbr_data(False) +async def async_using_multipan(hass: HomeAssistant, data: OTBRData) -> bool: """Return if the multiprotocol device is used. Returns False if not configured. 
""" - if DOMAIN not in hass.data: - return False - - data: OTBRData = hass.data[DOMAIN] - return is_multiprotocol_url(data.url) + return True diff --git a/homeassistant/components/otbr/strings.json b/homeassistant/components/otbr/strings.json index 838ebeb5b8c..bc7812c1db7 100644 --- a/homeassistant/components/otbr/strings.json +++ b/homeassistant/components/otbr/strings.json @@ -9,6 +9,7 @@ } }, "error": { + "already_configured": "The Thread border router is already configured", "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]" }, "abort": { diff --git a/homeassistant/components/otbr/util.py b/homeassistant/components/otbr/util.py index 16cf3b60e37..351e23c7736 100644 --- a/homeassistant/components/otbr/util.py +++ b/homeassistant/components/otbr/util.py @@ -7,8 +7,9 @@ import dataclasses from functools import wraps import logging import random -from typing import Any, Concatenate, cast +from typing import TYPE_CHECKING, Any, Concatenate, cast +import aiohttp import python_otbr_api from python_otbr_api import PENDING_DATASET_DELAY_TIMER, tlv_parser from python_otbr_api.pskc import compute_pskc @@ -21,12 +22,16 @@ from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon multi_pan_addon_using_device, ) from homeassistant.components.homeassistant_yellow import RADIO_DEVICE as YELLOW_RADIO +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir from .const import DOMAIN +if TYPE_CHECKING: + from . import OTBRConfigEntry + _LOGGER = logging.getLogger(__name__) INFO_URL_SKY_CONNECT = ( @@ -47,6 +52,10 @@ INSECURE_PASSPHRASES = ( ) +class GetBorderAgentIdNotSupported(HomeAssistantError): + """Raised from python_otbr_api.GetBorderAgentIdNotSupportedError.""" + + def compose_default_network_name(pan_id: int) -> str: """Generate a default network name.""" return f"ha-thread-{pan_id:04x}" @@ -67,7 +76,7 @@ def _handle_otbr_error[**_P, _R]( async def _func(self: OTBRData, *args: _P.args, **kwargs: _P.kwargs) -> _R: try: return await func(self, *args, **kwargs) - except python_otbr_api.OTBRError as exc: + except (python_otbr_api.OTBRError, aiohttp.ClientError, TimeoutError) as exc: raise HomeAssistantError("Failed to call OTBR API") from exc return _func @@ -82,7 +91,7 @@ class OTBRData: entry_id: str @_handle_otbr_error - async def factory_reset(self) -> None: + async def factory_reset(self, hass: HomeAssistant) -> None: """Reset the router.""" try: await self.api.factory_reset() @@ -91,14 +100,19 @@ class OTBRData: "OTBR does not support factory reset, attempting to delete dataset" ) await self.delete_active_dataset() + await update_unique_id( + hass, + hass.config_entries.async_get_entry(self.entry_id), + await self.get_border_agent_id(), + ) @_handle_otbr_error - async def get_border_agent_id(self) -> bytes | None: + async def get_border_agent_id(self) -> bytes: """Get the border agent ID or None if not supported by the router.""" try: return await self.api.get_border_agent_id() - except python_otbr_api.GetBorderAgentIdNotSupportedError: - return None + except python_otbr_api.GetBorderAgentIdNotSupportedError as exc: + raise GetBorderAgentIdNotSupported from exc @_handle_otbr_error async def set_enabled(self, enabled: bool) -> None: @@ -257,3 +271,18 @@ async def update_issues( """Raise or clear repair issues related to network settings.""" await _warn_on_channel_collision(hass, otbrdata, dataset_tlvs) 
_warn_on_default_network_settings(hass, otbrdata, dataset_tlvs) + + +async def update_unique_id( + hass: HomeAssistant, entry: OTBRConfigEntry | None, border_agent_id: bytes +) -> None: + """Update the config entry's unique_id if not matching.""" + border_agent_id_hex = border_agent_id.hex() + if entry and entry.source == SOURCE_USER and entry.unique_id != border_agent_id_hex: + _LOGGER.debug( + "Updating unique_id of entry %s from %s to %s", + entry.entry_id, + entry.unique_id, + border_agent_id_hex, + ) + hass.config_entries.async_update_entry(entry, unique_id=border_agent_id_hex) diff --git a/homeassistant/components/otbr/websocket_api.py b/homeassistant/components/otbr/websocket_api.py index 9b7e46bc362..2bcd0da8f16 100644 --- a/homeassistant/components/otbr/websocket_api.py +++ b/homeassistant/components/otbr/websocket_api.py @@ -2,7 +2,7 @@ from collections.abc import Callable, Coroutine from functools import wraps -from typing import Any, cast +from typing import TYPE_CHECKING, Any, cast import python_otbr_api from python_otbr_api import PENDING_DATASET_DELAY_TIMER, tlv_parser @@ -26,6 +26,9 @@ from .util import ( update_issues, ) +if TYPE_CHECKING: + from . import OTBRConfigEntry + @callback def async_setup(hass: HomeAssistant) -> None: @@ -47,41 +50,45 @@ async def websocket_info( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Get OTBR info.""" - if DOMAIN not in hass.data: + config_entries: list[OTBRConfigEntry] + config_entries = hass.config_entries.async_loaded_entries(DOMAIN) + + if not config_entries: connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded") return - data: OTBRData = hass.data[DOMAIN] + response: dict[str, dict[str, Any]] = {} - try: - border_agent_id = await data.get_border_agent_id() - dataset = await data.get_active_dataset() - dataset_tlvs = await data.get_active_dataset_tlvs() - extended_address = (await data.get_extended_address()).hex() - except HomeAssistantError as exc: - connection.send_error(msg["id"], "otbr_info_failed", str(exc)) - return + for config_entry in config_entries: + data = config_entry.runtime_data + try: + border_agent_id = await data.get_border_agent_id() + dataset = await data.get_active_dataset() + dataset_tlvs = await data.get_active_dataset_tlvs() + extended_address = (await data.get_extended_address()).hex() + except HomeAssistantError as exc: + connection.send_error(msg["id"], "otbr_info_failed", str(exc)) + return - # The border agent ID is checked when the OTBR config entry is setup, - # we can assert it's not None - assert border_agent_id is not None + # The border agent ID is checked when the OTBR config entry is setup, + # we can assert it's not None + assert border_agent_id is not None - extended_pan_id = ( - dataset.extended_pan_id.lower() if dataset and dataset.extended_pan_id else None - ) - connection.send_result( - msg["id"], - { - extended_address: { - "active_dataset_tlvs": dataset_tlvs.hex() if dataset_tlvs else None, - "border_agent_id": border_agent_id.hex(), - "channel": dataset.channel if dataset else None, - "extended_address": extended_address, - "extended_pan_id": extended_pan_id, - "url": data.url, - } - }, - ) + extended_pan_id = ( + dataset.extended_pan_id.lower() + if dataset and dataset.extended_pan_id + else None + ) + response[extended_address] = { + "active_dataset_tlvs": dataset_tlvs.hex() if dataset_tlvs else None, + "border_agent_id": border_agent_id.hex(), + "channel": dataset.channel if dataset else None, + "extended_address": 
extended_address, + "extended_pan_id": extended_pan_id, + "url": data.url, + } + + connection.send_result(msg["id"], response) def async_get_otbr_data( @@ -99,22 +106,29 @@ def async_get_otbr_data( hass: HomeAssistant, connection: websocket_api.ActiveConnection, msg: dict ) -> None: """Fetch OTBR data and pass to orig_func.""" - if DOMAIN not in hass.data: + config_entries: list[OTBRConfigEntry] + config_entries = hass.config_entries.async_loaded_entries(DOMAIN) + + if not config_entries: connection.send_error(msg["id"], "not_loaded", "No OTBR API loaded") return - data: OTBRData = hass.data[DOMAIN] + for config_entry in config_entries: + data = config_entry.runtime_data + try: + extended_address = await data.get_extended_address() + except HomeAssistantError as exc: + connection.send_error( + msg["id"], "get_extended_address_failed", str(exc) + ) + return + if extended_address.hex() != msg["extended_address"]: + continue - try: - extended_address = await data.get_extended_address() - except HomeAssistantError as exc: - connection.send_error(msg["id"], "get_extended_address_failed", str(exc)) - return - if extended_address.hex() != msg["extended_address"]: - connection.send_error(msg["id"], "unknown_router", "") + await orig_func(hass, connection, msg, data) return - await orig_func(hass, connection, msg, data) + connection.send_error(msg["id"], "unknown_router", "") return async_check_extended_address_func @@ -144,7 +158,7 @@ async def websocket_create_network( return try: - await data.factory_reset() + await data.factory_reset(hass) except HomeAssistantError as exc: connection.send_error(msg["id"], "factory_reset_failed", str(exc)) return diff --git a/homeassistant/components/otp/config_flow.py b/homeassistant/components/otp/config_flow.py index 6aa4532683a..33f63a04d68 100644 --- a/homeassistant/components/otp/config_flow.py +++ b/homeassistant/components/otp/config_flow.py @@ -82,15 +82,15 @@ class TOTPConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from yaml.""" - await self.async_set_unique_id(import_info[CONF_TOKEN]) + await self.async_set_unique_id(import_data[CONF_TOKEN]) self._abort_if_unique_id_configured() return self.async_create_entry( - title=import_info.get(CONF_NAME, DEFAULT_NAME), - data=import_info, + title=import_data.get(CONF_NAME, DEFAULT_NAME), + data=import_data, ) async def async_step_confirm( diff --git a/homeassistant/components/overkiz/binary_sensor.py b/homeassistant/components/overkiz/binary_sensor.py index 8ea86e03e8c..57df3cd4e09 100644 --- a/homeassistant/components/overkiz/binary_sensor.py +++ b/homeassistant/components/overkiz/binary_sensor.py @@ -115,14 +115,24 @@ BINARY_SENSOR_DESCRIPTIONS: list[OverkizBinarySensorDescription] = [ OverkizBinarySensorDescription( key=OverkizState.MODBUSLINK_DHW_ABSENCE_MODE, name="Absence mode", - value_fn=lambda state: state - in (OverkizCommandParam.ON, OverkizCommandParam.PROG), + value_fn=( + lambda state: state in (OverkizCommandParam.ON, OverkizCommandParam.PROG) + ), ), OverkizBinarySensorDescription( key=OverkizState.MODBUSLINK_DHW_BOOST_MODE, name="Boost mode", - value_fn=lambda state: state - in (OverkizCommandParam.ON, OverkizCommandParam.PROG), + value_fn=( + lambda state: state in (OverkizCommandParam.ON, OverkizCommandParam.PROG) + ), + ), + OverkizBinarySensorDescription( + key=OverkizState.MODBUSLINK_DHW_MODE, + 
name="Manual mode", + value_fn=( + lambda state: state + in (OverkizCommandParam.MANUAL, OverkizCommandParam.MANUAL_ECO_INACTIVE) + ), ), ] diff --git a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py index f18edd0cfe6..9027dcf8d03 100644 --- a/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py +++ b/homeassistant/components/overkiz/climate_entities/atlantic_pass_apc_zone_control_zone.py @@ -234,7 +234,8 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): """Set new target hvac mode.""" if self.is_using_derogated_temperature_fallback: - return await super().async_set_hvac_mode(hvac_mode) + await super().async_set_hvac_mode(hvac_mode) + return # They are mainly managed by the Zone Control device # However, it make sense to map the OFF Mode to the Overkiz STOP Preset @@ -287,7 +288,8 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): """Set new preset mode.""" if self.is_using_derogated_temperature_fallback: - return await super().async_set_preset_mode(preset_mode) + await super().async_set_preset_mode(preset_mode) + return mode = PRESET_MODES_TO_OVERKIZ[preset_mode] @@ -361,7 +363,8 @@ class AtlanticPassAPCZoneControlZone(AtlanticPassAPCHeatingZone): """Set new temperature.""" if self.is_using_derogated_temperature_fallback: - return await super().async_set_temperature(**kwargs) + await super().async_set_temperature(**kwargs) + return target_temperature = kwargs.get(ATTR_TEMPERATURE) target_temp_low = kwargs.get(ATTR_TARGET_TEMP_LOW) diff --git a/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py b/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py index 85ce7ae57e3..acc761664ec 100644 --- a/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py +++ b/homeassistant/components/overkiz/climate_entities/somfy_heating_temperature_interface.py @@ -181,6 +181,7 @@ class SomfyHeatingTemperatureInterface(OverkizEntity, ClimateEntity): OverkizState.OVP_HEATING_TEMPERATURE_INTERFACE_SETPOINT_MODE ] ) and mode.value_as_str: - return await self.executor.async_execute_command( + await self.executor.async_execute_command( SETPOINT_MODE_TO_OVERKIZ_COMMAND[mode.value_as_str], temperature ) + return diff --git a/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py b/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py index de995a2bd1a..0f57d13433b 100644 --- a/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py +++ b/homeassistant/components/overkiz/water_heater_entities/atlantic_domestic_hot_water_production_mlb_component.py @@ -6,6 +6,7 @@ from pyoverkiz.enums import OverkizCommand, OverkizCommandParam, OverkizState from homeassistant.components.water_heater import ( STATE_ECO, + STATE_ELECTRIC, STATE_OFF, STATE_PERFORMANCE, WaterHeaterEntity, @@ -28,9 +29,10 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE | WaterHeaterEntityFeature.ON_OFF ) _attr_operation_list = [ - OverkizCommandParam.PERFORMANCE, - OverkizCommandParam.ECO, - OverkizCommandParam.MANUAL, + STATE_ECO, + STATE_OFF, + STATE_PERFORMANCE, + STATE_ELECTRIC, ] def __init__( @@ -116,20 +118,20 @@ class 
AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE cast(str, self.executor.select_state(OverkizState.MODBUSLINK_DHW_MODE)) == OverkizCommandParam.MANUAL_ECO_INACTIVE ): - return OverkizCommandParam.MANUAL + # STATE_ELECTRIC is a substitution for OverkizCommandParam.MANUAL + # to keep up with the conventional state usage only + # https://developers.home-assistant.io/docs/core/entity/water-heater/#states + return STATE_ELECTRIC return STATE_OFF async def async_set_operation_mode(self, operation_mode: str) -> None: """Set new operation mode.""" - if operation_mode in (STATE_PERFORMANCE, OverkizCommandParam.BOOST): + if operation_mode == STATE_PERFORMANCE: if self.is_away_mode_on: await self.async_turn_away_mode_off() await self.async_turn_boost_mode_on() - elif operation_mode in ( - OverkizCommandParam.ECO, - OverkizCommandParam.MANUAL_ECO_ACTIVE, - ): + elif operation_mode == STATE_ECO: if self.is_away_mode_on: await self.async_turn_away_mode_off() if self.is_boost_mode_on: @@ -137,10 +139,7 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE await self.executor.async_execute_command( OverkizCommand.SET_DHW_MODE, OverkizCommandParam.AUTO_MODE ) - elif operation_mode in ( - OverkizCommandParam.MANUAL, - OverkizCommandParam.MANUAL_ECO_INACTIVE, - ): + elif operation_mode == STATE_ELECTRIC: if self.is_away_mode_on: await self.async_turn_away_mode_off() if self.is_boost_mode_on: @@ -148,14 +147,8 @@ class AtlanticDomesticHotWaterProductionMBLComponent(OverkizEntity, WaterHeaterE await self.executor.async_execute_command( OverkizCommand.SET_DHW_MODE, OverkizCommandParam.MANUAL_ECO_INACTIVE ) - else: - if self.is_away_mode_on: - await self.async_turn_away_mode_off() - if self.is_boost_mode_on: - await self.async_turn_boost_mode_off() - await self.executor.async_execute_command( - OverkizCommand.SET_DHW_MODE, operation_mode - ) + elif operation_mode == STATE_OFF: + await self.async_turn_away_mode_on() async def async_turn_away_mode_on(self) -> None: """Turn away mode on.""" diff --git a/homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py b/homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py index 9f0a8798233..dc2a93a8d2f 100644 --- a/homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py +++ b/homeassistant/components/overkiz/water_heater_entities/hitachi_dhw.py @@ -87,9 +87,10 @@ class HitachiDHW(OverkizEntity, WaterHeaterEntity): """Set new target operation mode.""" # Turn water heater off if operation_mode == OverkizCommandParam.OFF: - return await self.executor.async_execute_command( + await self.executor.async_execute_command( OverkizCommand.SET_CONTROL_DHW, OverkizCommandParam.STOP ) + return # Turn water heater on, when off if self.current_operation == OverkizCommandParam.OFF: diff --git a/homeassistant/components/owntracks/config_flow.py b/homeassistant/components/owntracks/config_flow.py index 29fe4f0cf65..390cc880c1e 100644 --- a/homeassistant/components/owntracks/config_flow.py +++ b/homeassistant/components/owntracks/config_flow.py @@ -1,9 +1,10 @@ """Config flow for OwnTracks.""" import secrets +from typing import Any from homeassistant.components import cloud, webhook -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_WEBHOOK_ID from .const import DOMAIN @@ -18,7 +19,9 @@ class OwnTracksFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, 
user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a user initiated set up flow to create OwnTracks webhook.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/panasonic_viera/config_flow.py b/homeassistant/components/panasonic_viera/config_flow.py index 0226fb33c9e..b00fee513a6 100644 --- a/homeassistant/components/panasonic_viera/config_flow.py +++ b/homeassistant/components/panasonic_viera/config_flow.py @@ -157,11 +157,9 @@ class PanasonicVieraConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(user_input=import_config) + return await self.async_step_user(user_input=import_data) async def async_load_data(self, config: dict[str, Any]) -> None: """Load the data.""" diff --git a/homeassistant/components/permobil/config_flow.py b/homeassistant/components/permobil/config_flow.py index cb47640e55f..f7f247a412e 100644 --- a/homeassistant/components/permobil/config_flow.py +++ b/homeassistant/components/permobil/config_flow.py @@ -161,17 +161,12 @@ class PermobilConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=self.data[CONF_EMAIL], data=self.data) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" - reauth_entry = self.hass.config_entries.async_get_entry( - self.context["entry_id"] - ) - assert reauth_entry - try: - email: str = reauth_entry.data[CONF_EMAIL] - region: str = reauth_entry.data[CONF_REGION] + email: str = entry_data[CONF_EMAIL] + region: str = entry_data[CONF_REGION] self.p_api.set_email(email) self.p_api.set_region(region) self.data = { diff --git a/homeassistant/components/persistent_notification/icons.json b/homeassistant/components/persistent_notification/icons.json index 9c782bd7b21..30847357a47 100644 --- a/homeassistant/components/persistent_notification/icons.json +++ b/homeassistant/components/persistent_notification/icons.json @@ -1,7 +1,13 @@ { "services": { - "create": "mdi:message-badge", - "dismiss": "mdi:bell-off", - "dismiss_all": "mdi:notification-clear-all" + "create": { + "service": "mdi:message-badge" + }, + "dismiss": { + "service": "mdi:bell-off" + }, + "dismiss_all": { + "service": "mdi:notification-clear-all" + } } } diff --git a/homeassistant/components/person/icons.json b/homeassistant/components/person/icons.json index fbfd5be75d2..f645d9c2090 100644 --- a/homeassistant/components/person/icons.json +++ b/homeassistant/components/person/icons.json @@ -8,6 +8,8 @@ } }, "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/pi_hole/__init__.py b/homeassistant/components/pi_hole/__init__.py index ad36b664994..bf314e96dec 100644 --- a/homeassistant/components/pi_hole/__init__.py +++ b/homeassistant/components/pi_hole/__init__.py @@ -20,7 +20,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed -from homeassistant.helpers import config_validation as cv, entity_registry as er +from homeassistant.helpers 
import entity_registry as er from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.update_coordinator import ( @@ -33,7 +33,6 @@ from .const import CONF_STATISTICS_ONLY, DOMAIN, MIN_TIME_BETWEEN_UPDATES _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [ Platform.BINARY_SENSOR, diff --git a/homeassistant/components/pi_hole/icons.json b/homeassistant/components/pi_hole/icons.json index 58f20da5a2d..3a45f8ab454 100644 --- a/homeassistant/components/pi_hole/icons.json +++ b/homeassistant/components/pi_hole/icons.json @@ -36,6 +36,8 @@ } }, "services": { - "disable": "mdi:server-off" + "disable": { + "service": "mdi:server-off" + } } } diff --git a/homeassistant/components/picnic/config_flow.py b/homeassistant/components/picnic/config_flow.py index 3023b5309de..9548029209b 100644 --- a/homeassistant/components/picnic/config_flow.py +++ b/homeassistant/components/picnic/config_flow.py @@ -87,7 +87,9 @@ class PicnicConfigFlow(ConfigFlow, domain=DOMAIN): """Perform the re-auth step upon an API authentication error.""" return await self.async_step_user() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the authentication step, this is the generic step for both `step_user` and `step_reauth`.""" if user_input is None: return self.async_show_form( diff --git a/homeassistant/components/picnic/icons.json b/homeassistant/components/picnic/icons.json index d8f99153f33..78803b6d263 100644 --- a/homeassistant/components/picnic/icons.json +++ b/homeassistant/components/picnic/icons.json @@ -57,6 +57,8 @@ } }, "services": { - "add_product": "mdi:cart-plus" + "add_product": { + "service": "mdi:cart-plus" + } } } diff --git a/homeassistant/components/pilight/icons.json b/homeassistant/components/pilight/icons.json index c1b8e741e45..cbc48cf2105 100644 --- a/homeassistant/components/pilight/icons.json +++ b/homeassistant/components/pilight/icons.json @@ -1,5 +1,7 @@ { "services": { - "send": "mdi:send" + "send": { + "service": "mdi:send" + } } } diff --git a/homeassistant/components/plaato/config_flow.py b/homeassistant/components/plaato/config_flow.py index 1240abc5e81..3ada4fdc312 100644 --- a/homeassistant/components/plaato/config_flow.py +++ b/homeassistant/components/plaato/config_flow.py @@ -2,11 +2,18 @@ from __future__ import annotations +from typing import Any + from pyplaato.plaato import PlaatoDeviceType import voluptuous as vol from homeassistant.components import cloud, webhook -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_SCAN_INTERVAL, CONF_TOKEN, CONF_WEBHOOK_ID from homeassistant.core import callback import homeassistant.helpers.config_validation as cv @@ -31,11 +38,13 @@ class PlaatoConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize.""" - self._init_info = {} + self._init_info: dict[str, Any] = {} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user step.""" if user_input is not None: @@ -185,7 +194,9 @@ class PlaatoOptionsFlowHandler(OptionsFlow): return 
await self.async_step_user() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Manage the options.""" if user_input is not None: return self.async_create_entry(title="", data=user_input) diff --git a/homeassistant/components/plex/config_flow.py b/homeassistant/components/plex/config_flow.py index 374067c94cd..7162e517e23 100644 --- a/homeassistant/components/plex/config_flow.py +++ b/homeassistant/components/plex/config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Mapping import copy import logging -from typing import Any +from typing import TYPE_CHECKING, Any from aiohttp import web_response import plexapi.exceptions @@ -105,15 +105,15 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): """Get the options flow for this handler.""" return PlexOptionsFlowHandler(config_entry) - def __init__(self): + def __init__(self) -> None: """Initialize the Plex flow.""" - self.current_login = {} + self.current_login: dict[str, Any] = {} self.available_servers = None self.plexauth = None self.token = None self.client_id = None self._manual = False - self._reauth_config = None + self._reauth_config: dict[str, Any] | None = None async def async_step_user(self, user_input=None, errors=None): """Handle a flow initialized by the user.""" @@ -184,7 +184,9 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): step_id="manual_setup", data_schema=data_schema, errors=errors ) - async def async_step_server_validate(self, server_config): + async def async_step_server_validate( + self, server_config: dict[str, Any] + ) -> ConfigFlowResult: """Validate a provided configuration.""" if self._reauth_config: server_config = {**self._reauth_config, **server_config} @@ -249,6 +251,8 @@ class PlexFlowHandler(ConfigFlow, domain=DOMAIN): entry = await self.async_set_unique_id(server_id) if self.context[CONF_SOURCE] == SOURCE_REAUTH: + if TYPE_CHECKING: + assert entry self.hass.config_entries.async_update_entry(entry, data=data) _LOGGER.debug("Updated config entry for %s", plex_server.friendly_name) await self.hass.config_entries.async_reload(entry.entry_id) diff --git a/homeassistant/components/plex/icons.json b/homeassistant/components/plex/icons.json index 03bc835d2f6..2d3a7342ad2 100644 --- a/homeassistant/components/plex/icons.json +++ b/homeassistant/components/plex/icons.json @@ -7,7 +7,11 @@ } }, "services": { - "refresh_library": "mdi:refresh", - "scan_for_clients": "mdi:database-refresh" + "refresh_library": { + "service": "mdi:refresh" + }, + "scan_for_clients": { + "service": "mdi:database-refresh" + } } } diff --git a/homeassistant/components/plex/manifest.json b/homeassistant/components/plex/manifest.json index 323bca0477a..6270a6d3496 100644 --- a/homeassistant/components/plex/manifest.json +++ b/homeassistant/components/plex/manifest.json @@ -8,7 +8,7 @@ "iot_class": "local_push", "loggers": ["plexapi", "plexwebsocket"], "requirements": [ - "PlexAPI==4.15.14", + "PlexAPI==4.15.16", "plexauth==0.0.6", "plexwebsocket==0.0.14" ], diff --git a/homeassistant/components/point/config_flow.py b/homeassistant/components/point/config_flow.py index 279561b4e2b..b2455438208 100644 --- a/homeassistant/components/point/config_flow.py +++ b/homeassistant/components/point/config_flow.py @@ -3,12 +3,13 @@ import asyncio from collections import OrderedDict import logging +from typing import Any from pypoint import PointSession import voluptuous as vol from 
homeassistant.components.http import KEY_HASS, HomeAssistantView -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import callback from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -59,7 +60,9 @@ class PointFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_auth() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" flows = self.hass.data.get(DATA_FLOW_IMPL, {}) diff --git a/homeassistant/components/powerwall/__init__.py b/homeassistant/components/powerwall/__init__.py index be09c729237..0b6f889b90a 100644 --- a/homeassistant/components/powerwall/__init__.py +++ b/homeassistant/components/powerwall/__init__.py @@ -22,7 +22,6 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.aiohttp_client import async_create_clientsession -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util.network import is_ip_address @@ -34,8 +33,6 @@ from .models import ( PowerwallRuntimeData, ) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/private_ble_device/manifest.json b/homeassistant/components/private_ble_device/manifest.json index 8b072361d34..6759cdda0f0 100644 --- a/homeassistant/components/private_ble_device/manifest.json +++ b/homeassistant/components/private_ble_device/manifest.json @@ -6,5 +6,5 @@ "dependencies": ["bluetooth_adapters"], "documentation": "https://www.home-assistant.io/integrations/private_ble_device", "iot_class": "local_push", - "requirements": ["bluetooth-data-tools==1.19.4"] + "requirements": ["bluetooth-data-tools==1.20.0"] } diff --git a/homeassistant/components/profiler/config_flow.py b/homeassistant/components/profiler/config_flow.py index 4acce51e25f..19995cf79aa 100644 --- a/homeassistant/components/profiler/config_flow.py +++ b/homeassistant/components/profiler/config_flow.py @@ -1,6 +1,8 @@ """Config flow for Profiler integration.""" -from homeassistant.config_entries import ConfigFlow +from typing import Any + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from .const import DEFAULT_NAME, DOMAIN @@ -10,7 +12,9 @@ class ProfilerConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") diff --git a/homeassistant/components/profiler/icons.json b/homeassistant/components/profiler/icons.json index 4dda003c186..c1f996b6eb1 100644 --- a/homeassistant/components/profiler/icons.json +++ b/homeassistant/components/profiler/icons.json @@ -1,16 +1,40 @@ { "services": { - "start": "mdi:play", - "memory": "mdi:memory", - "start_log_objects": "mdi:invoice-text-plus", - "stop_log_objects": 
"mdi:invoice-text-remove", - "dump_log_objects": "mdi:invoice-export-outline", - "start_log_object_sources": "mdi:play", - "stop_log_object_sources": "mdi:stop", - "lru_stats": "mdi:chart-areaspline", - "log_current_tasks": "mdi:format-list-bulleted", - "log_thread_frames": "mdi:format-list-bulleted", - "log_event_loop_scheduled": "mdi:calendar-clock", - "set_asyncio_debug": "mdi:bug-check" + "start": { + "service": "mdi:play" + }, + "memory": { + "service": "mdi:memory" + }, + "start_log_objects": { + "service": "mdi:invoice-text-plus" + }, + "stop_log_objects": { + "service": "mdi:invoice-text-remove" + }, + "dump_log_objects": { + "service": "mdi:invoice-export-outline" + }, + "start_log_object_sources": { + "service": "mdi:play" + }, + "stop_log_object_sources": { + "service": "mdi:stop" + }, + "lru_stats": { + "service": "mdi:chart-areaspline" + }, + "log_current_tasks": { + "service": "mdi:format-list-bulleted" + }, + "log_thread_frames": { + "service": "mdi:format-list-bulleted" + }, + "log_event_loop_scheduled": { + "service": "mdi:calendar-clock" + }, + "set_asyncio_debug": { + "service": "mdi:bug-check" + } } } diff --git a/homeassistant/components/progettihwsw/config_flow.py b/homeassistant/components/progettihwsw/config_flow.py index dbe12184a10..95596b940a4 100644 --- a/homeassistant/components/progettihwsw/config_flow.py +++ b/homeassistant/components/progettihwsw/config_flow.py @@ -1,9 +1,11 @@ """Config flow for ProgettiHWSW Automation integration.""" +from typing import Any + from ProgettiHWSW.ProgettiHWSWAPI import ProgettiHWSWAPI import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -38,7 +40,7 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Initialize class variables.""" - self.s1_in = None + self.s1_in: dict[str, Any] | None = None async def async_step_relay_modes(self, user_input=None): """Manage relay modes step.""" @@ -66,7 +68,9 @@ class ProgettiHWSWConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/prometheus/__init__.py b/homeassistant/components/prometheus/__init__.py index f7037a685a6..8cc0a8f4b6a 100644 --- a/homeassistant/components/prometheus/__init__.py +++ b/homeassistant/components/prometheus/__init__.py @@ -51,6 +51,16 @@ from homeassistant.const import ( CONTENT_TYPE_TEXT_PLAIN, EVENT_STATE_CHANGED, PERCENTAGE, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMED, + STATE_ALARM_DISARMING, + STATE_ALARM_PENDING, + STATE_ALARM_TRIGGERED, STATE_CLOSED, STATE_CLOSING, STATE_ON, @@ -807,6 +817,35 @@ class PrometheusMetrics: value = self.state_as_number(state) metric.labels(**self._labels(state)).set(value) + def _handle_alarm_control_panel(self, state: State) -> None: + current_state = state.state + + if current_state: + metric = self._metric( + "alarm_control_panel_state", + prometheus_client.Gauge, + "State of the alarm control panel (0/1)", + ["state"], + ) + + alarm_states = [ + STATE_ALARM_ARMED_AWAY, + 
STATE_ALARM_ARMED_CUSTOM_BYPASS, + STATE_ALARM_ARMED_HOME, + STATE_ALARM_ARMED_NIGHT, + STATE_ALARM_ARMED_VACATION, + STATE_ALARM_DISARMED, + STATE_ALARM_TRIGGERED, + STATE_ALARM_PENDING, + STATE_ALARM_ARMING, + STATE_ALARM_DISARMING, + ] + + for alarm_state in alarm_states: + metric.labels(**dict(self._labels(state), state=alarm_state)).set( + float(alarm_state == current_state) + ) + class PrometheusView(HomeAssistantView): """Handle Prometheus requests.""" diff --git a/homeassistant/components/prosegur/camera.py b/homeassistant/components/prosegur/camera.py index fd911fa5898..2df6ff62038 100644 --- a/homeassistant/components/prosegur/camera.py +++ b/homeassistant/components/prosegur/camera.py @@ -31,7 +31,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_REQUEST_IMAGE, - {}, + None, "async_request_image", ) diff --git a/homeassistant/components/prosegur/config_flow.py b/homeassistant/components/prosegur/config_flow.py index 82cf1d424c7..7a8f67cef7d 100644 --- a/homeassistant/components/prosegur/config_flow.py +++ b/homeassistant/components/prosegur/config_flow.py @@ -51,7 +51,9 @@ class ProsegurConfigFlow(ConfigFlow, domain=DOMAIN): user_input: dict contracts: list[dict[str, str]] - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/prosegur/icons.json b/homeassistant/components/prosegur/icons.json index 33cddefdaea..8f175ab9056 100644 --- a/homeassistant/components/prosegur/icons.json +++ b/homeassistant/components/prosegur/icons.json @@ -1,5 +1,7 @@ { "services": { - "request_image": "mdi:image-sync" + "request_image": { + "service": "mdi:image-sync" + } } } diff --git a/homeassistant/components/proximity/__init__.py b/homeassistant/components/proximity/__init__.py index 813686789a2..763274243c5 100644 --- a/homeassistant/components/proximity/__init__.py +++ b/homeassistant/components/proximity/__init__.py @@ -3,137 +3,20 @@ from __future__ import annotations import logging -from typing import cast -import voluptuous as vol - -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry -from homeassistant.const import ( - CONF_DEVICES, - CONF_NAME, - CONF_UNIT_OF_MEASUREMENT, - CONF_ZONE, - STATE_UNKNOWN, - Platform, -) -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant -import homeassistant.helpers.config_validation as cv +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.helpers.event import ( async_track_entity_registry_updated_event, async_track_state_change_event, ) -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue -from homeassistant.helpers.typing import ConfigType -from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import ( - ATTR_DIR_OF_TRAVEL, - ATTR_DIST_TO, - ATTR_NEAREST, - CONF_IGNORED_ZONES, - CONF_TOLERANCE, - CONF_TRACKED_ENTITIES, - DEFAULT_PROXIMITY_ZONE, - DEFAULT_TOLERANCE, - DOMAIN, - UNITS, -) +from .const import CONF_TRACKED_ENTITIES from .coordinator import ProximityConfigEntry, ProximityDataUpdateCoordinator -from .helpers import entity_used_in _LOGGER = logging.getLogger(__name__) -ZONE_SCHEMA = vol.Schema( - { - vol.Optional(CONF_ZONE, default=DEFAULT_PROXIMITY_ZONE): cv.string, - 
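The _handle_alarm_control_panel handler added to the Prometheus exporter above publishes the panel state as a one-hot labelled gauge: the series for the current state is set to 1.0 and every other known state to 0.0, so a scrape always shows exactly one active series per entity. A minimal standalone sketch of that pattern with plain prometheus_client follows; the helper name, entity id and trimmed state list are illustrative, not the integration's API.

from prometheus_client import Gauge

# Known states; the integration uses the full STATE_ALARM_* list from homeassistant.const.
ALARM_STATES = [
    "armed_away", "armed_home", "armed_night",
    "arming", "disarmed", "disarming", "pending", "triggered",
]

alarm_state_gauge = Gauge(
    "alarm_control_panel_state",
    "State of the alarm control panel (0/1)",
    ["entity", "state"],
)

def publish_alarm_state(entity_id: str, current_state: str) -> None:
    """Set the current state's series to 1.0 and all other states to 0.0."""
    for state in ALARM_STATES:
        alarm_state_gauge.labels(entity=entity_id, state=state).set(
            float(state == current_state)
        )

publish_alarm_state("alarm_control_panel.home", "armed_home")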
vol.Optional(CONF_DEVICES, default=[]): vol.All(cv.ensure_list, [cv.entity_id]), - vol.Optional(CONF_IGNORED_ZONES, default=[]): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(CONF_TOLERANCE, default=DEFAULT_TOLERANCE): cv.positive_int, - vol.Optional(CONF_UNIT_OF_MEASUREMENT): vol.All(cv.string, vol.In(UNITS)), - } -) - -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - {DOMAIN: cv.schema_with_slug_keys(ZONE_SCHEMA)}, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def _async_setup_legacy( - hass: HomeAssistant, - entry: ProximityConfigEntry, - coordinator: ProximityDataUpdateCoordinator, -) -> None: - """Legacy proximity entity handling, can be removed in 2024.8.""" - friendly_name = entry.data[CONF_NAME] - proximity = Proximity(hass, friendly_name, coordinator) - await proximity.async_added_to_hass() - proximity.async_write_ha_state() - - if used_in := entity_used_in(hass, f"{DOMAIN}.{friendly_name}"): - async_create_issue( - hass, - DOMAIN, - f"deprecated_proximity_entity_{friendly_name}", - breaks_in_ha_version="2024.8.0", - is_fixable=True, - is_persistent=True, - severity=IssueSeverity.WARNING, - translation_key="deprecated_proximity_entity", - translation_placeholders={ - "entity": f"{DOMAIN}.{friendly_name}", - "used_in": "\n- ".join([f"`{x}`" for x in used_in]), - }, - ) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Get the zones and offsets from configuration.yaml.""" - if DOMAIN in config: - for friendly_name, proximity_config in config[DOMAIN].items(): - _LOGGER.debug("import %s with config:%s", friendly_name, proximity_config) - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_NAME: friendly_name, - CONF_ZONE: f"zone.{proximity_config[CONF_ZONE]}", - CONF_TRACKED_ENTITIES: proximity_config[CONF_DEVICES], - CONF_IGNORED_ZONES: [ - f"zone.{zone}" - for zone in proximity_config[CONF_IGNORED_ZONES] - ], - CONF_TOLERANCE: proximity_config[CONF_TOLERANCE], - CONF_UNIT_OF_MEASUREMENT: proximity_config.get( - CONF_UNIT_OF_MEASUREMENT, hass.config.units.length_unit - ), - }, - ) - ) - - async_create_issue( - hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Proximity", - }, - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: ProximityConfigEntry) -> bool: """Set up Proximity from a config entry.""" @@ -160,9 +43,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ProximityConfigEntry) -> await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator - if entry.source == SOURCE_IMPORT: - await _async_setup_legacy(hass, entry, coordinator) - await hass.config_entries.async_forward_entry_setups(entry, [Platform.SENSOR]) entry.async_on_unload(entry.add_update_listener(_async_update_listener)) return True @@ -176,45 +56,3 @@ async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def _async_update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Handle options update.""" await hass.config_entries.async_reload(entry.entry_id) - - -class Proximity(CoordinatorEntity[ProximityDataUpdateCoordinator]): - """Representation of a Proximity.""" - - # This entity is legacy and does not have a platform. 
- # We can't fix this easily without breaking changes. - _no_platform_reported = True - - def __init__( - self, - hass: HomeAssistant, - friendly_name: str, - coordinator: ProximityDataUpdateCoordinator, - ) -> None: - """Initialize the proximity.""" - super().__init__(coordinator) - self.hass = hass - self.entity_id = f"{DOMAIN}.{friendly_name}" - - self._attr_name = friendly_name - self._attr_unit_of_measurement = self.coordinator.unit_of_measurement - - @property - def data(self) -> dict[str, str | int | None]: - """Get data from coordinator.""" - return self.coordinator.data.proximity - - @property - def state(self) -> str | float: - """Return the state.""" - if isinstance(distance := self.data[ATTR_DIST_TO], str): - return distance - return self.coordinator.convert_legacy(cast(int, distance)) - - @property - def extra_state_attributes(self) -> dict[str, str]: - """Return the state attributes.""" - return { - ATTR_DIR_OF_TRAVEL: str(self.data[ATTR_DIR_OF_TRAVEL] or STATE_UNKNOWN), - ATTR_NEAREST: str(self.data[ATTR_NEAREST]), - } diff --git a/homeassistant/components/proximity/config_flow.py b/homeassistant/components/proximity/config_flow.py index d133b14cb6a..1758b182ad7 100644 --- a/homeassistant/components/proximity/config_flow.py +++ b/homeassistant/components/proximity/config_flow.py @@ -117,12 +117,6 @@ class ProximityConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=self._user_form_schema(user_input), ) - async def async_step_import( - self, user_input: dict[str, Any] | None = None - ) -> ConfigFlowResult: - """Import a yaml config entry.""" - return await self.async_step_user(user_input) - class ProximityOptionsFlow(OptionsFlow): """Handle a option flow.""" diff --git a/homeassistant/components/proximity/coordinator.py b/homeassistant/components/proximity/coordinator.py index 2d32926832a..a8dd85c1523 100644 --- a/homeassistant/components/proximity/coordinator.py +++ b/homeassistant/components/proximity/coordinator.py @@ -13,7 +13,6 @@ from homeassistant.const import ( ATTR_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_ZONE, - UnitOfLength, ) from homeassistant.core import ( Event, @@ -27,7 +26,6 @@ from homeassistant.helpers.issue_registry import IssueSeverity, async_create_iss from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator from homeassistant.util.location import distance -from homeassistant.util.unit_conversion import DistanceConverter from .const import ( ATTR_DIR_OF_TRAVEL, @@ -145,18 +143,6 @@ class ProximityDataUpdateCoordinator(DataUpdateCoordinator[ProximityData]): }, ) - def convert_legacy(self, value: float | str) -> float | str: - """Round and convert given distance value.""" - if isinstance(value, str): - return value - return round( - DistanceConverter.convert( - value, - UnitOfLength.METERS, - self.unit_of_measurement, - ) - ) - def _calc_distance_to_zone( self, zone: State, diff --git a/homeassistant/components/proximity/helpers.py b/homeassistant/components/proximity/helpers.py deleted file mode 100644 index af3d6d2a3bb..00000000000 --- a/homeassistant/components/proximity/helpers.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Helper functions for proximity.""" - -from homeassistant.components.automation import automations_with_entity -from homeassistant.components.script import scripts_with_entity -from homeassistant.core import HomeAssistant - - -def entity_used_in(hass: HomeAssistant, entity_id: str) -> list[str]: - """Get list of related automations and scripts.""" - used_in = 
automations_with_entity(hass, entity_id) - used_in += scripts_with_entity(hass, entity_id) - return used_in diff --git a/homeassistant/components/proximity/strings.json b/homeassistant/components/proximity/strings.json index 72c95eeeeae..118004e908e 100644 --- a/homeassistant/components/proximity/strings.json +++ b/homeassistant/components/proximity/strings.json @@ -55,17 +55,6 @@ } }, "issues": { - "deprecated_proximity_entity": { - "title": "The proximity entity is deprecated", - "fix_flow": { - "step": { - "confirm": { - "title": "[%key:component::proximity::issues::deprecated_proximity_entity::title%]", - "description": "The proximity entity `{entity}` is deprecated and will be removed in `2024.8`. However it is used within the following configurations:\n- {used_in}\n\nPlease adjust any automations or scripts that use this deprecated Proximity entity.\nFor each tracked person or device one sensor for the distance and the direction of travel to/from the monitored zone is created. Additionally for each Proximity configuration one sensor which shows the nearest device or person to the monitored zone is created. With this you can use the Min/Max integration to determine the nearest and furthest distance." - } - } - } - }, "tracked_entity_removed": { "title": "Tracked entity has been removed", "fix_flow": { diff --git a/homeassistant/components/ps4/config_flow.py b/homeassistant/components/ps4/config_flow.py index b842c2f7cfb..cdbf02dcc90 100644 --- a/homeassistant/components/ps4/config_flow.py +++ b/homeassistant/components/ps4/config_flow.py @@ -1,13 +1,14 @@ """Config Flow for PlayStation 4.""" from collections import OrderedDict +from typing import Any from pyps4_2ndscreen.errors import CredentialTimeout from pyps4_2ndscreen.helpers import Helper from pyps4_2ndscreen.media_art import COUNTRIES import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import ( CONF_CODE, CONF_HOST, @@ -44,7 +45,7 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): VERSION = CONFIG_ENTRY_VERSION - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" self.helper = Helper() self.creds = None @@ -54,9 +55,11 @@ class PlayStation4FlowHandler(ConfigFlow, domain=DOMAIN): self.pin = None self.m_device = None self.location = None - self.device_list = [] + self.device_list: list[str] = [] - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a user config flow.""" # Check if able to bind to ports: UDP 987, TCP 997. 
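The OwnTracks, Picnic, Plaato, Point, Profiler, ProgettiHWSW, Prosegur and PS4 flows in this patch all converge on the same fully typed user step: user_input annotated as dict[str, Any] | None and a ConfigFlowResult return type. A minimal sketch of the resulting shape, assuming a hypothetical integration DOMAIN (the class name and the bare form are placeholders, not code from the patch):

from typing import Any

from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

from .const import DOMAIN  # hypothetical integration constant


class ExampleConfigFlow(ConfigFlow, domain=DOMAIN):
    """Illustrate the typed user step used throughout this patch."""

    VERSION = 1

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        if user_input is not None:
            return self.async_create_entry(title="Example", data=user_input)
        return self.async_show_form(step_id="user")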
ports = PORT_MSG.keys() diff --git a/homeassistant/components/ps4/icons.json b/homeassistant/components/ps4/icons.json index 8da5909213b..21f8405f816 100644 --- a/homeassistant/components/ps4/icons.json +++ b/homeassistant/components/ps4/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "send_command": "mdi:console" + "send_command": { + "service": "mdi:console" + } } } diff --git a/homeassistant/components/pvpc_hourly_pricing/__init__.py b/homeassistant/components/pvpc_hourly_pricing/__init__.py index a92f159d172..6327164e3c8 100644 --- a/homeassistant/components/pvpc_hourly_pricing/__init__.py +++ b/homeassistant/components/pvpc_hourly_pricing/__init__.py @@ -3,7 +3,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_TOKEN, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv import homeassistant.helpers.entity_registry as er from .const import ATTR_POWER, ATTR_POWER_P3, DOMAIN @@ -11,7 +10,6 @@ from .coordinator import ElecPricesDataUpdateCoordinator from .helpers import get_enabled_sensor_keys PLATFORMS: list[Platform] = [Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: diff --git a/homeassistant/components/pyload/config_flow.py b/homeassistant/components/pyload/config_flow.py index 2f4f9519d30..79b90ff917d 100644 --- a/homeassistant/components/pyload/config_flow.py +++ b/homeassistant/components/pyload/config_flow.py @@ -133,16 +133,16 @@ class PyLoadConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_info: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import config from yaml.""" config = { - CONF_NAME: import_info.get(CONF_NAME), - CONF_HOST: import_info.get(CONF_HOST, DEFAULT_HOST), - CONF_PASSWORD: import_info.get(CONF_PASSWORD, ""), - CONF_PORT: import_info.get(CONF_PORT, DEFAULT_PORT), - CONF_SSL: import_info.get(CONF_SSL, False), - CONF_USERNAME: import_info.get(CONF_USERNAME, ""), + CONF_NAME: import_data.get(CONF_NAME), + CONF_HOST: import_data.get(CONF_HOST, DEFAULT_HOST), + CONF_PASSWORD: import_data.get(CONF_PASSWORD, ""), + CONF_PORT: import_data.get(CONF_PORT, DEFAULT_PORT), + CONF_SSL: import_data.get(CONF_SSL, False), + CONF_USERNAME: import_data.get(CONF_USERNAME, ""), CONF_VERIFY_SSL: False, } diff --git a/homeassistant/components/pyload/strings.json b/homeassistant/components/pyload/strings.json index 38e17e5016f..bbe6989f5e7 100644 --- a/homeassistant/components/pyload/strings.json +++ b/homeassistant/components/pyload/strings.json @@ -74,7 +74,7 @@ "name": "Downloads in queue" }, "total": { - "name": "Finished downloads" + "name": "Total downloads" }, "free_space": { "name": "Free space" diff --git a/homeassistant/components/python_script/__init__.py b/homeassistant/components/python_script/__init__.py index 72e2f3a824b..70e9c5b0d29 100644 --- a/homeassistant/components/python_script/__init__.py +++ b/homeassistant/components/python_script/__init__.py @@ -108,13 +108,13 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: return True -def discover_scripts(hass): +def discover_scripts(hass: HomeAssistant) -> None: """Discover python scripts in folder.""" path = hass.config.path(FOLDER) if not os.path.isdir(path): _LOGGER.warning("Folder %s not found in configuration folder", FOLDER) - return False + return def 
python_script_service_handler(call: ServiceCall) -> ServiceResponse: """Handle python script service calls.""" @@ -277,7 +277,7 @@ def execute(hass, filename, source, data=None, return_response=False): if not isinstance(restricted_globals["output"], dict): output_type = type(restricted_globals["output"]) restricted_globals["output"] = {} - raise ScriptError( + raise ScriptError( # noqa: TRY301 f"Expected `output` to be a dictionary, was {output_type}" ) except ScriptError as err: diff --git a/homeassistant/components/python_script/icons.json b/homeassistant/components/python_script/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/python_script/icons.json +++ b/homeassistant/components/python_script/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/qbittorrent/icons.json b/homeassistant/components/qbittorrent/icons.json index 68fc1020dae..cede127ebe8 100644 --- a/homeassistant/components/qbittorrent/icons.json +++ b/homeassistant/components/qbittorrent/icons.json @@ -10,7 +10,11 @@ } }, "services": { - "get_torrents": "mdi:file-arrow-up-down-outline", - "get_all_torrents": "mdi:file-arrow-up-down-outline" + "get_torrents": { + "service": "mdi:file-arrow-up-down-outline" + }, + "get_all_torrents": { + "service": "mdi:file-arrow-up-down-outline" + } } } diff --git a/homeassistant/components/qvr_pro/icons.json b/homeassistant/components/qvr_pro/icons.json index 556a8d40752..3b57387d251 100644 --- a/homeassistant/components/qvr_pro/icons.json +++ b/homeassistant/components/qvr_pro/icons.json @@ -1,6 +1,10 @@ { "services": { - "start_record": "mdi:record-rec", - "stop_record": "mdi:stop" + "start_record": { + "service": "mdi:record-rec" + }, + "stop_record": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/rachio/__init__.py b/homeassistant/components/rachio/__init__.py index a5922e0cb95..6976d3f5ba6 100644 --- a/homeassistant/components/rachio/__init__.py +++ b/homeassistant/components/rachio/__init__.py @@ -11,7 +11,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_KEY, CONF_WEBHOOK_ID, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv from .const import CONF_CLOUDHOOK_URL, CONF_MANUAL_RUN_MINS, DOMAIN from .device import RachioPerson @@ -25,8 +24,6 @@ _LOGGER = logging.getLogger(__name__) PLATFORMS = [Platform.BINARY_SENSOR, Platform.CALENDAR, Platform.SWITCH] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Unload a config entry.""" diff --git a/homeassistant/components/rachio/config_flow.py b/homeassistant/components/rachio/config_flow.py index 77fe20946b4..bdd2f81536d 100644 --- a/homeassistant/components/rachio/config_flow.py +++ b/homeassistant/components/rachio/config_flow.py @@ -4,6 +4,7 @@ from __future__ import annotations from http import HTTPStatus import logging +from typing import Any from rachiopy import Rachio from requests.exceptions import ConnectTimeout @@ -67,7 +68,9 @@ class RachioConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if 
user_input is not None: diff --git a/homeassistant/components/rachio/icons.json b/homeassistant/components/rachio/icons.json index dfab8788fc8..df30929ab4c 100644 --- a/homeassistant/components/rachio/icons.json +++ b/homeassistant/components/rachio/icons.json @@ -10,11 +10,23 @@ } }, "services": { - "set_zone_moisture_percent": "mdi:water-percent", - "start_multiple_zone_schedule": "mdi:play", - "pause_watering": "mdi:pause", - "resume_watering": "mdi:play", - "stop_watering": "mdi:stop", - "start_watering": "mdi:water" + "set_zone_moisture_percent": { + "service": "mdi:water-percent" + }, + "start_multiple_zone_schedule": { + "service": "mdi:play" + }, + "pause_watering": { + "service": "mdi:pause" + }, + "resume_watering": { + "service": "mdi:play" + }, + "stop_watering": { + "service": "mdi:stop" + }, + "start_watering": { + "service": "mdi:water" + } } } diff --git a/homeassistant/components/radarr/config_flow.py b/homeassistant/components/radarr/config_flow.py index 3bf0796a9a8..c748c63e992 100644 --- a/homeassistant/components/radarr/config_flow.py +++ b/homeassistant/components/radarr/config_flow.py @@ -26,7 +26,9 @@ class RadarrConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 entry: RadarrConfigEntry | None = None - async def async_step_reauth(self, _: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self.entry = self.hass.config_entries.async_get_entry(self.context["entry_id"]) diff --git a/homeassistant/components/rainbird/icons.json b/homeassistant/components/rainbird/icons.json index 79d2256f184..61c09f74e88 100644 --- a/homeassistant/components/rainbird/icons.json +++ b/homeassistant/components/rainbird/icons.json @@ -22,7 +22,11 @@ } }, "services": { - "start_irrigation": "mdi:water", - "set_rain_delay": "mdi:water-sync" + "start_irrigation": { + "service": "mdi:water" + }, + "set_rain_delay": { + "service": "mdi:water-sync" + } } } diff --git a/homeassistant/components/raincloud/__init__.py b/homeassistant/components/raincloud/__init__.py index e6f5d2ecf8d..a805024357c 100644 --- a/homeassistant/components/raincloud/__init__.py +++ b/homeassistant/components/raincloud/__init__.py @@ -102,7 +102,7 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: try: raincloud = RainCloudy(username=username, password=password) if not raincloud.is_connected: - raise HTTPError + raise HTTPError # noqa: TRY301 hass.data[DATA_RAINCLOUD] = RainCloudHub(raincloud) except (ConnectTimeout, HTTPError) as ex: _LOGGER.error("Unable to connect to Rain Cloud service: %s", str(ex)) diff --git a/homeassistant/components/rainmachine/__init__.py b/homeassistant/components/rainmachine/__init__.py index cfbc95cf009..b10d562ac67 100644 --- a/homeassistant/components/rainmachine/__init__.py +++ b/homeassistant/components/rainmachine/__init__.py @@ -58,7 +58,6 @@ from .model import RainMachineEntityDescription DEFAULT_SSL = True -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [ Platform.BINARY_SENSOR, diff --git a/homeassistant/components/rainmachine/icons.json b/homeassistant/components/rainmachine/icons.json index 32988081a18..ca85d81346e 100644 --- a/homeassistant/components/rainmachine/icons.json +++ b/homeassistant/components/rainmachine/icons.json @@ -70,16 +70,38 @@ } }, "services": { - "pause_watering": "mdi:pause", - "restrict_watering": "mdi:cancel", - "start_program": "mdi:play", - "start_zone": "mdi:play", - "stop_all": "mdi:stop", - 
"stop_program": "mdi:stop", - "stop_zone": "mdi:stop", - "unpause_watering": "mdi:play-pause", - "push_flow_meter_data": "mdi:database-arrow-up", - "push_weather_data": "mdi:database-arrow-up", - "unrestrict_watering": "mdi:check" + "pause_watering": { + "service": "mdi:pause" + }, + "restrict_watering": { + "service": "mdi:cancel" + }, + "start_program": { + "service": "mdi:play" + }, + "start_zone": { + "service": "mdi:play" + }, + "stop_all": { + "service": "mdi:stop" + }, + "stop_program": { + "service": "mdi:stop" + }, + "stop_zone": { + "service": "mdi:stop" + }, + "unpause_watering": { + "service": "mdi:play-pause" + }, + "push_flow_meter_data": { + "service": "mdi:database-arrow-up" + }, + "push_weather_data": { + "service": "mdi:database-arrow-up" + }, + "unrestrict_watering": { + "service": "mdi:check" + } } } diff --git a/homeassistant/components/rainmachine/switch.py b/homeassistant/components/rainmachine/switch.py index d4c0064219e..8368db47d61 100644 --- a/homeassistant/components/rainmachine/switch.py +++ b/homeassistant/components/rainmachine/switch.py @@ -6,7 +6,7 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine from dataclasses import dataclass from datetime import datetime -from typing import Any, Concatenate, cast +from typing import Any, Concatenate from regenmaschine.errors import RainMachineError import voluptuous as vol @@ -184,8 +184,8 @@ async def async_setup_entry( """Set up RainMachine switches based on a config entry.""" platform = entity_platform.async_get_current_platform() - for service_name, schema, method in ( - ("start_program", {}, "async_start_program"), + services: tuple[tuple[str, VolDictType | None, str], ...] = ( + ("start_program", None, "async_start_program"), ( "start_zone", { @@ -195,11 +195,11 @@ async def async_setup_entry( }, "async_start_zone", ), - ("stop_program", {}, "async_stop_program"), - ("stop_zone", {}, "async_stop_zone"), - ): - schema_dict = cast(VolDictType, schema) - platform.async_register_entity_service(service_name, schema_dict, method) + ("stop_program", None, "async_stop_program"), + ("stop_zone", None, "async_stop_zone"), + ) + for service_name, schema, method in services: + platform.async_register_entity_service(service_name, schema, method) data = entry.runtime_data entities: list[RainMachineBaseSwitch] = [] diff --git a/homeassistant/components/recorder/core.py b/homeassistant/components/recorder/core.py index 31c36be9c88..c57274317e3 100644 --- a/homeassistant/components/recorder/core.py +++ b/homeassistant/components/recorder/core.py @@ -367,13 +367,6 @@ class Recorder(threading.Thread): """Add an executor job from within the event loop.""" return self.hass.loop.run_in_executor(self._db_executor, target, *args) - def _stop_executor(self) -> None: - """Stop the executor.""" - if self._db_executor is None: - return - self._db_executor.shutdown() - self._db_executor = None - @callback def _async_check_queue(self, *_: Any) -> None: """Periodic check of the queue size to ensure we do not exhaust memory. @@ -1501,5 +1494,13 @@ class Recorder(threading.Thread): try: self._end_session() finally: - self._stop_executor() + if self._db_executor: + # We shutdown the executor without forcefully + # joining the threads until after we have tried + # to cleanly close the connection. 
+ self._db_executor.shutdown(join_threads_or_timeout=False) self._close_connection() + if self._db_executor: + # After the connection is closed, we can join the threads + # or forcefully shutdown the threads if they take too long. + self._db_executor.join_threads_or_timeout() diff --git a/homeassistant/components/recorder/db_schema.py b/homeassistant/components/recorder/db_schema.py index dd293ed6bc2..6ba9d971f2c 100644 --- a/homeassistant/components/recorder/db_schema.py +++ b/homeassistant/components/recorder/db_schema.py @@ -77,7 +77,7 @@ class LegacyBase(DeclarativeBase): """Base class for tables, used for schema migration.""" -SCHEMA_VERSION = 45 +SCHEMA_VERSION = 47 _LOGGER = logging.getLogger(__name__) @@ -693,13 +693,13 @@ class StatisticsBase: @classmethod def from_stats(cls, metadata_id: int, stats: StatisticData) -> Self: - """Create object from a statistics with datatime objects.""" + """Create object from a statistics with datetime objects.""" return cls( # type: ignore[call-arg] metadata_id=metadata_id, created=None, created_ts=time.time(), start=None, - start_ts=dt_util.utc_to_timestamp(stats["start"]), + start_ts=stats["start"].timestamp(), mean=stats.get("mean"), min=stats.get("min"), max=stats.get("max"), diff --git a/homeassistant/components/recorder/icons.json b/homeassistant/components/recorder/icons.json index 1090401abd5..9e41637184a 100644 --- a/homeassistant/components/recorder/icons.json +++ b/homeassistant/components/recorder/icons.json @@ -1,8 +1,16 @@ { "services": { - "purge": "mdi:database-sync", - "purge_entities": "mdi:database-sync", - "disable": "mdi:database-off", - "enable": "mdi:database" + "purge": { + "service": "mdi:database-sync" + }, + "purge_entities": { + "service": "mdi:database-sync" + }, + "disable": { + "service": "mdi:database-off" + }, + "enable": { + "service": "mdi:database" + } } } diff --git a/homeassistant/components/recorder/manifest.json b/homeassistant/components/recorder/manifest.json index 7d5576e4672..2be4b6862ba 100644 --- a/homeassistant/components/recorder/manifest.json +++ b/homeassistant/components/recorder/manifest.json @@ -8,7 +8,7 @@ "quality_scale": "internal", "requirements": [ "SQLAlchemy==2.0.31", - "fnv-hash-fast==0.5.0", + "fnv-hash-fast==1.0.2", "psutil-home-assistant==0.0.1" ] } diff --git a/homeassistant/components/recorder/migration.py b/homeassistant/components/recorder/migration.py index 55856dcf449..7127a576580 100644 --- a/homeassistant/components/recorder/migration.py +++ b/homeassistant/components/recorder/migration.py @@ -15,7 +15,6 @@ from uuid import UUID import sqlalchemy from sqlalchemy import ForeignKeyConstraint, MetaData, Table, func, text, update from sqlalchemy.engine import CursorResult, Engine -from sqlalchemy.engine.interfaces import ReflectedForeignKeyConstraint from sqlalchemy.exc import ( DatabaseError, IntegrityError, @@ -580,12 +579,24 @@ def _modify_columns( _LOGGER.exception( "Could not modify column %s in table %s", column_def, table_name ) + raise def _update_states_table_with_foreign_key_options( session_maker: Callable[[], Session], engine: Engine ) -> None: - """Add the options to foreign key constraints.""" + """Add the options to foreign key constraints. + + This is not supported for SQLite because it does not support + dropping constraints. 
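StatisticsBase.from_stats above now derives start_ts with stats["start"].timestamp() instead of going through dt_util.utc_to_timestamp. For timezone-aware datetimes, which is what the recorder handles here, the two are equivalent: datetime.timestamp() already returns the POSIX timestamp, and only naive datetimes would differ because they are interpreted in local time. A small worked check, not part of the patch:

from datetime import UTC, datetime

start = datetime(2024, 8, 1, 12, 0, tzinfo=UTC)

# POSIX timestamp straight from the stdlib; no helper conversion is
# needed for timezone-aware values.
assert start.timestamp() == 1722513600.0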
+ """ + + if engine.dialect.name not in (SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL): + raise RuntimeError( + "_update_states_table_with_foreign_key_options not supported for " + f"{engine.dialect.name}" + ) + inspector = sqlalchemy.inspect(engine) tmp_states_table = Table(TABLE_STATES, MetaData()) alters = [ @@ -596,7 +607,7 @@ def _update_states_table_with_foreign_key_options( "columns": foreign_key["constrained_columns"], } for foreign_key in inspector.get_foreign_keys(TABLE_STATES) - if foreign_key["name"] + if foreign_key["name"] # It's not possible to drop an unnamed constraint and ( # MySQL/MariaDB will have empty options not foreign_key.get("options") @@ -628,20 +639,26 @@ def _update_states_table_with_foreign_key_options( _LOGGER.exception( "Could not update foreign options in %s table", TABLE_STATES ) + raise def _drop_foreign_key_constraints( session_maker: Callable[[], Session], engine: Engine, table: str, column: str -) -> tuple[bool, list[tuple[str, str, ReflectedForeignKeyConstraint]]]: - """Drop foreign key constraints for a table on specific columns.""" - inspector = sqlalchemy.inspect(engine) - dropped_constraints = [ - (table, column, foreign_key) - for foreign_key in inspector.get_foreign_keys(table) - if foreign_key["name"] and foreign_key["constrained_columns"] == [column] - ] +) -> None: + """Drop foreign key constraints for a table on specific columns. - ## Bind the ForeignKeyConstraints to the table + This is not supported for SQLite because it does not support + dropping constraints. + """ + + if engine.dialect.name not in (SupportedDialect.MYSQL, SupportedDialect.POSTGRESQL): + raise RuntimeError( + f"_drop_foreign_key_constraints not supported for {engine.dialect.name}" + ) + + inspector = sqlalchemy.inspect(engine) + + ## Find matching named constraints and bind the ForeignKeyConstraints to the table tmp_table = Table(table, MetaData()) drops = [ ForeignKeyConstraint((), (), name=foreign_key["name"], table=tmp_table) @@ -649,7 +666,6 @@ def _drop_foreign_key_constraints( if foreign_key["name"] and foreign_key["constrained_columns"] == [column] ] - fk_remove_ok = True for drop in drops: with session_scope(session=session_maker()) as session: try: @@ -661,9 +677,7 @@ def _drop_foreign_key_constraints( TABLE_STATES, column, ) - fk_remove_ok = False - - return fk_remove_ok, dropped_constraints + raise def _restore_foreign_key_constraints( @@ -681,6 +695,18 @@ def _restore_foreign_key_constraints( _LOGGER.info("Did not find a matching constraint for %s.%s", table, column) continue + inspector = sqlalchemy.inspect(engine) + if any( + foreign_key["name"] and foreign_key["constrained_columns"] == [column] + for foreign_key in inspector.get_foreign_keys(table) + ): + _LOGGER.info( + "The database already has a matching constraint for %s.%s", + table, + column, + ) + continue + if TYPE_CHECKING: assert foreign_table is not None assert foreign_column is not None @@ -727,6 +753,7 @@ def _add_constraint( connection.execute(add_constraint) except (InternalError, OperationalError): _LOGGER.exception("Could not update foreign options in %s table", table) + raise def _delete_foreign_key_violations( @@ -807,9 +834,9 @@ def _delete_foreign_key_violations( result = session.connection().execute( # We don't use an alias for the table we're deleting from, # support of the form `DELETE FROM table AS t1` was added in - # MariaDB 11.6 and is not supported by MySQL. 
Those engines - # instead support the from `DELETE t1 from table AS t1` which - # is not supported by PostgreSQL and undocumented for MariaDB. + # MariaDB 11.6 and is not supported by MySQL. MySQL and older + # MariaDB instead support the from `DELETE t1 from table AS t1` + # which is undocumented for MariaDB. text( f"DELETE FROM {table} " # noqa: S608 "WHERE (" @@ -1025,7 +1052,17 @@ class _SchemaVersion11Migrator(_SchemaVersionMigrator, target_version=11): def _apply_update(self) -> None: """Version specific update method.""" _create_index(self.session_maker, "states", "ix_states_old_state_id") - _update_states_table_with_foreign_key_options(self.session_maker, self.engine) + + # _update_states_table_with_foreign_key_options first drops foreign + # key constraints, and then re-adds them with the correct settings. + # This is not supported by SQLite + if self.engine.dialect.name in ( + SupportedDialect.MYSQL, + SupportedDialect.POSTGRESQL, + ): + _update_states_table_with_foreign_key_options( + self.session_maker, self.engine + ) class _SchemaVersion12Migrator(_SchemaVersionMigrator, target_version=12): @@ -1079,9 +1116,18 @@ class _SchemaVersion15Migrator(_SchemaVersionMigrator, target_version=15): class _SchemaVersion16Migrator(_SchemaVersionMigrator, target_version=16): def _apply_update(self) -> None: """Version specific update method.""" - _drop_foreign_key_constraints( - self.session_maker, self.engine, TABLE_STATES, "old_state_id" - ) + # Dropping foreign key constraints is not supported by SQLite + if self.engine.dialect.name in ( + SupportedDialect.MYSQL, + SupportedDialect.POSTGRESQL, + ): + # Version 16 changes settings for the foreign key constraint on + # states.old_state_id. Dropping the constraint is not really correct + # we should have recreated it instead. Recreating the constraint now + # happens in the migration to schema version 47. + _drop_foreign_key_constraints( + self.session_maker, self.engine, TABLE_STATES, "old_state_id" + ) class _SchemaVersion17Migrator(_SchemaVersionMigrator, target_version=17): @@ -1603,6 +1649,24 @@ class _SchemaVersion43Migrator(_SchemaVersionMigrator, target_version=43): ) +class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): + def _apply_update(self) -> None: + """Version specific update method.""" + # The changes in this version are identical to the changes in version + # 46. We apply the same changes again because the migration code previously + # swallowed errors which caused some users' databases to end up in an + # undefined state after the migration. + + +class _SchemaVersion45Migrator(_SchemaVersionMigrator, target_version=45): + def _apply_update(self) -> None: + """Version specific update method.""" + # The changes in this version are identical to the changes in version + # 47. We apply the same changes again because the migration code previously + # swallowed errors which caused some users' databases to end up in an + # undefined state after the migration. 
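The re-numbered migrators above (44 and 45 becoming no-ops, with their work re-applied in 46 and 47) rely on the migration being safe to re-run; consistent with that, _restore_foreign_key_constraints now skips any column that already carries a matching constraint, so re-applying the step cannot create duplicates. A distilled sketch of that idempotency check using only SQLAlchemy's reflection API; the helper name and the usage comment are hypothetical, not the recorder's code.

import sqlalchemy
from sqlalchemy.engine import Engine


def has_named_foreign_key(engine: Engine, table: str, column: str) -> bool:
    """Return True if *table* already has a named FK constraint on *column*."""
    inspector = sqlalchemy.inspect(engine)
    return any(
        fk["name"] and fk["constrained_columns"] == [column]
        for fk in inspector.get_foreign_keys(table)
    )


# usage sketch: only restore the constraint when it is actually missing
# if not has_named_foreign_key(engine, "states", "old_state_id"):
#     ...re-add the constraint...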
+ + FOREIGN_COLUMNS = ( ( "events", @@ -1635,7 +1699,7 @@ FOREIGN_COLUMNS = ( ) -class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): +class _SchemaVersion46Migrator(_SchemaVersionMigrator, target_version=46): def _apply_update(self) -> None: """Version specific update method.""" # We skip this step for SQLITE, it doesn't have differently sized integers @@ -1686,14 +1750,14 @@ class _SchemaVersion44Migrator(_SchemaVersionMigrator, target_version=44): ) -class _SchemaVersion45Migrator(_SchemaVersionMigrator, target_version=45): +class _SchemaVersion47Migrator(_SchemaVersionMigrator, target_version=47): def _apply_update(self) -> None: """Version specific update method.""" # We skip this step for SQLITE, it doesn't have differently sized integers if self.engine.dialect.name == SupportedDialect.SQLITE: return - # Restore constraints dropped in migration to schema version 44 + # Restore constraints dropped in migration to schema version 46 _restore_foreign_key_constraints( self.session_maker, self.engine, @@ -2141,9 +2205,14 @@ def cleanup_legacy_states_event_ids(instance: Recorder) -> bool: # so we have to rebuild the table fk_remove_ok = rebuild_sqlite_table(session_maker, instance.engine, States) else: - fk_remove_ok, _ = _drop_foreign_key_constraints( - session_maker, instance.engine, TABLE_STATES, "event_id" - ) + try: + _drop_foreign_key_constraints( + session_maker, instance.engine, TABLE_STATES, "event_id" + ) + except (InternalError, OperationalError): + fk_remove_ok = False + else: + fk_remove_ok = True if fk_remove_ok: _drop_index(session_maker, "states", LEGACY_STATES_EVENT_ID_INDEX) instance.use_legacy_events_index = False diff --git a/homeassistant/components/recorder/models/time.py b/homeassistant/components/recorder/models/time.py index 6295060c8d3..8f0f89a9ffa 100644 --- a/homeassistant/components/recorder/models/time.py +++ b/homeassistant/components/recorder/models/time.py @@ -65,9 +65,7 @@ def process_datetime_to_timestamp(ts: datetime) -> float: def datetime_to_timestamp_or_none(dt: datetime | None) -> float | None: """Convert a datetime to a timestamp.""" - if dt is None: - return None - return dt_util.utc_to_timestamp(dt) + return None if dt is None else dt.timestamp() def timestamp_to_datetime_or_none(ts: float | None) -> datetime | None: diff --git a/homeassistant/components/recorder/pool.py b/homeassistant/components/recorder/pool.py index 0fa0e82e98b..30f8fa8d07a 100644 --- a/homeassistant/components/recorder/pool.py +++ b/homeassistant/components/recorder/pool.py @@ -100,7 +100,7 @@ class RecorderPool(SingletonThreadPool, NullPool): # which is allowed but discouraged since its much slower return self._do_get_db_connection_protected() # In the event loop, raise an exception - raise_for_blocking_call( + raise_for_blocking_call( # noqa: RET503 self._do_get_db_connection_protected, strict=True, advise_msg=ADVISE_MSG, diff --git a/homeassistant/components/recorder/statistics.py b/homeassistant/components/recorder/statistics.py index aeeb30816d7..ba19c016d19 100644 --- a/homeassistant/components/recorder/statistics.py +++ b/homeassistant/components/recorder/statistics.py @@ -148,6 +148,12 @@ STATISTIC_UNIT_TO_UNIT_CONVERTER: dict[str | None, type[BaseUnitConverter]] = { **{unit: VolumeFlowRateConverter for unit in VolumeFlowRateConverter.VALID_UNITS}, } + +UNIT_CLASSES = { + unit: converter.UNIT_CLASS + for unit, converter in STATISTIC_UNIT_TO_UNIT_CONVERTER.items() +} + DATA_SHORT_TERM_STATISTICS_RUN_CACHE = 
"recorder_short_term_statistics_run_cache" @@ -211,13 +217,6 @@ class StatisticsRow(BaseStatisticsRow, total=False): change: float | None -def _get_unit_class(unit: str | None) -> str | None: - """Get corresponding unit class from from the statistics unit.""" - if converter := STATISTIC_UNIT_TO_UNIT_CONVERTER.get(unit): - return converter.UNIT_CLASS - return None - - def get_display_unit( hass: HomeAssistant, statistic_id: str, @@ -807,7 +806,7 @@ def _statistic_by_id_from_metadata( "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], - "unit_class": _get_unit_class(meta["unit_of_measurement"]), + "unit_class": UNIT_CLASSES.get(meta["unit_of_measurement"]), "unit_of_measurement": meta["unit_of_measurement"], } for _, meta in metadata.values() @@ -881,7 +880,7 @@ def list_statistic_ids( "has_sum": meta["has_sum"], "name": meta["name"], "source": meta["source"], - "unit_class": _get_unit_class(meta["unit_of_measurement"]), + "unit_class": UNIT_CLASSES.get(meta["unit_of_measurement"]), "unit_of_measurement": meta["unit_of_measurement"], } @@ -2089,71 +2088,38 @@ def _build_stats( db_rows: list[Row], table_duration_seconds: float, start_ts_idx: int, - mean_idx: int | None, - min_idx: int | None, - max_idx: int | None, - last_reset_ts_idx: int | None, - state_idx: int | None, - sum_idx: int | None, + row_mapping: tuple[tuple[str, int], ...], ) -> list[StatisticsRow]: """Build a list of statistics without unit conversion.""" - result: list[StatisticsRow] = [] - ent_results_append = result.append - for db_row in db_rows: - row: StatisticsRow = { + return [ + { "start": (start_ts := db_row[start_ts_idx]), "end": start_ts + table_duration_seconds, + **{key: db_row[idx] for key, idx in row_mapping}, # type: ignore[typeddict-item] } - if last_reset_ts_idx is not None: - row["last_reset"] = db_row[last_reset_ts_idx] - if mean_idx is not None: - row["mean"] = db_row[mean_idx] - if min_idx is not None: - row["min"] = db_row[min_idx] - if max_idx is not None: - row["max"] = db_row[max_idx] - if state_idx is not None: - row["state"] = db_row[state_idx] - if sum_idx is not None: - row["sum"] = db_row[sum_idx] - ent_results_append(row) - return result + for db_row in db_rows + ] def _build_converted_stats( db_rows: list[Row], table_duration_seconds: float, start_ts_idx: int, - mean_idx: int | None, - min_idx: int | None, - max_idx: int | None, - last_reset_ts_idx: int | None, - state_idx: int | None, - sum_idx: int | None, + row_mapping: tuple[tuple[str, int], ...], convert: Callable[[float | None], float | None] | Callable[[float], float], ) -> list[StatisticsRow]: """Build a list of statistics with unit conversion.""" - result: list[StatisticsRow] = [] - ent_results_append = result.append - for db_row in db_rows: - row: StatisticsRow = { + return [ + { "start": (start_ts := db_row[start_ts_idx]), "end": start_ts + table_duration_seconds, + **{ + key: None if (v := db_row[idx]) is None else convert(v) # type: ignore[typeddict-item] + for key, idx in row_mapping + }, } - if last_reset_ts_idx is not None: - row["last_reset"] = db_row[last_reset_ts_idx] - if mean_idx is not None: - row["mean"] = None if (v := db_row[mean_idx]) is None else convert(v) - if min_idx is not None: - row["min"] = None if (v := db_row[min_idx]) is None else convert(v) - if max_idx is not None: - row["max"] = None if (v := db_row[max_idx]) is None else convert(v) - if state_idx is not None: - row["state"] = None if (v := db_row[state_idx]) is None else convert(v) - if sum_idx is not None: - row["sum"] = None if 
(v := db_row[sum_idx]) is None else convert(v) - ent_results_append(row) - return result + for db_row in db_rows + ] def _sorted_statistics_to_dict( @@ -2193,14 +2159,11 @@ def _sorted_statistics_to_dict( # Figure out which fields we need to extract from the SQL result # and which indices they have in the result so we can avoid the overhead # of doing a dict lookup for each row - mean_idx = field_map["mean"] if "mean" in types else None - min_idx = field_map["min"] if "min" in types else None - max_idx = field_map["max"] if "max" in types else None - last_reset_ts_idx = field_map["last_reset_ts"] if "last_reset" in types else None - state_idx = field_map["state"] if "state" in types else None + if "last_reset_ts" in field_map: + field_map["last_reset"] = field_map.pop("last_reset_ts") sum_idx = field_map["sum"] if "sum" in types else None sum_only = len(types) == 1 and sum_idx is not None - row_idxes = (mean_idx, min_idx, max_idx, last_reset_ts_idx, state_idx, sum_idx) + row_mapping = tuple((key, field_map[key]) for key in types if key in field_map) # Append all statistic entries, and optionally do unit conversion table_duration_seconds = table.duration.total_seconds() for meta_id, db_rows in stats_by_meta_id.items(): @@ -2229,9 +2192,9 @@ def _sorted_statistics_to_dict( else: _stats = _build_sum_stats(*build_args, sum_idx) elif convert: - _stats = _build_converted_stats(*build_args, *row_idxes, convert) + _stats = _build_converted_stats(*build_args, row_mapping, convert) else: - _stats = _build_stats(*build_args, *row_idxes) + _stats = _build_stats(*build_args, row_mapping) result[statistic_id] = _stats diff --git a/homeassistant/components/recorder/strings.json b/homeassistant/components/recorder/strings.json index f891b4d18d2..2ded6be58d6 100644 --- a/homeassistant/components/recorder/strings.json +++ b/homeassistant/components/recorder/strings.json @@ -1,11 +1,11 @@ { "system_health": { "info": { - "oldest_recorder_run": "Oldest Run Start Time", - "current_recorder_run": "Current Run Start Time", - "estimated_db_size": "Estimated Database Size (MiB)", - "database_engine": "Database Engine", - "database_version": "Database Version" + "oldest_recorder_run": "Oldest run start time", + "current_recorder_run": "Current run start time", + "estimated_db_size": "Estimated database size (MiB)", + "database_engine": "Database engine", + "database_version": "Database version" } }, "issues": { diff --git a/homeassistant/components/recorder/table_managers/statistics_meta.py b/homeassistant/components/recorder/table_managers/statistics_meta.py index 9b33eff0c9b..77fc34518db 100644 --- a/homeassistant/components/recorder/table_managers/statistics_meta.py +++ b/homeassistant/components/recorder/table_managers/statistics_meta.py @@ -4,7 +4,7 @@ from __future__ import annotations import logging import threading -from typing import TYPE_CHECKING, Literal, cast +from typing import TYPE_CHECKING, Final, Literal from lru import LRU from sqlalchemy import lambda_stmt, select @@ -33,6 +33,14 @@ QUERY_STATISTIC_META = ( StatisticsMeta.name, ) +INDEX_ID: Final = 0 +INDEX_STATISTIC_ID: Final = 1 +INDEX_SOURCE: Final = 2 +INDEX_UNIT_OF_MEASUREMENT: Final = 3 +INDEX_HAS_MEAN: Final = 4 +INDEX_HAS_SUM: Final = 5 +INDEX_NAME: Final = 6 + def _generate_get_metadata_stmt( statistic_ids: set[str] | None = None, @@ -52,23 +60,6 @@ def _generate_get_metadata_stmt( return stmt -def _statistics_meta_to_id_statistics_metadata( - meta: StatisticsMeta, -) -> tuple[int, StatisticMetaData]: - """Convert StatisticsMeta tuple of 
metadata_id and StatisticMetaData.""" - return ( - meta.id, - { - "has_mean": meta.has_mean, # type: ignore[typeddict-item] - "has_sum": meta.has_sum, # type: ignore[typeddict-item] - "name": meta.name, - "source": meta.source, # type: ignore[typeddict-item] - "statistic_id": meta.statistic_id, # type: ignore[typeddict-item] - "unit_of_measurement": meta.unit_of_measurement, - }, - ) - - class StatisticsMetaManager: """Manage the StatisticsMeta table.""" @@ -100,6 +91,10 @@ class StatisticsMetaManager: and self.recorder.thread_id == threading.get_ident() ) results: dict[str, tuple[int, StatisticMetaData]] = {} + id_meta: tuple[int, StatisticMetaData] + meta: StatisticMetaData + statistic_id: str + row_id: int with session.no_autoflush: stat_id_to_id_meta = self._stat_id_to_id_meta for row in execute_stmt_lambda_element( @@ -109,10 +104,17 @@ class StatisticsMetaManager: ), orm_rows=False, ): - statistics_meta = cast(StatisticsMeta, row) - id_meta = _statistics_meta_to_id_statistics_metadata(statistics_meta) - - statistic_id = cast(str, statistics_meta.statistic_id) + statistic_id = row[INDEX_STATISTIC_ID] + row_id = row[INDEX_ID] + meta = { + "has_mean": row[INDEX_HAS_MEAN], + "has_sum": row[INDEX_HAS_SUM], + "name": row[INDEX_NAME], + "source": row[INDEX_SOURCE], + "statistic_id": statistic_id, + "unit_of_measurement": row[INDEX_UNIT_OF_MEASUREMENT], + } + id_meta = (row_id, meta) results[statistic_id] = id_meta if update_cache: stat_id_to_id_meta[statistic_id] = id_meta diff --git a/homeassistant/components/recorder/websocket_api.py b/homeassistant/components/recorder/websocket_api.py index 5e0eef37721..f08f7bdcb97 100644 --- a/homeassistant/components/recorder/websocket_api.py +++ b/homeassistant/components/recorder/websocket_api.py @@ -15,6 +15,7 @@ from homeassistant.helpers import config_validation as cv from homeassistant.helpers.json import json_bytes from homeassistant.util import dt as dt_util from homeassistant.util.unit_conversion import ( + ConductivityConverter, DataRateConverter, DistanceConverter, DurationConverter, @@ -48,7 +49,7 @@ from .util import PERIOD_SCHEMA, get_instance, resolve_period UNIT_SCHEMA = vol.Schema( { - vol.Optional("conductivity"): vol.In(DataRateConverter.VALID_UNITS), + vol.Optional("conductivity"): vol.In(ConductivityConverter.VALID_UNITS), vol.Optional("data_rate"): vol.In(DataRateConverter.VALID_UNITS), vol.Optional("distance"): vol.In(DistanceConverter.VALID_UNITS), vol.Optional("duration"): vol.In(DurationConverter.VALID_UNITS), diff --git a/homeassistant/components/remember_the_milk/icons.json b/homeassistant/components/remember_the_milk/icons.json index 3ca17113fb8..04502aea5ef 100644 --- a/homeassistant/components/remember_the_milk/icons.json +++ b/homeassistant/components/remember_the_milk/icons.json @@ -1,6 +1,10 @@ { "services": { - "create_task": "mdi:check", - "complete_task": "mdi:check-all" + "create_task": { + "service": "mdi:check" + }, + "complete_task": { + "service": "mdi:check-all" + } } } diff --git a/homeassistant/components/remote/icons.json b/homeassistant/components/remote/icons.json index 07526a4bc79..43a7f6ee7b6 100644 --- a/homeassistant/components/remote/icons.json +++ b/homeassistant/components/remote/icons.json @@ -8,11 +8,23 @@ } }, "services": { - "delete_command": "mdi:delete", - "learn_command": "mdi:school", - "send_command": "mdi:remote", - "toggle": "mdi:remote", - "turn_off": "mdi:remote-off", - "turn_on": "mdi:remote" + "delete_command": { + "service": "mdi:delete" + }, + "learn_command": { + "service": 
"mdi:school" + }, + "send_command": { + "service": "mdi:remote" + }, + "toggle": { + "service": "mdi:remote" + }, + "turn_off": { + "service": "mdi:remote-off" + }, + "turn_on": { + "service": "mdi:remote" + } } } diff --git a/homeassistant/components/renault/icons.json b/homeassistant/components/renault/icons.json index 75356fda411..883725eb601 100644 --- a/homeassistant/components/renault/icons.json +++ b/homeassistant/components/renault/icons.json @@ -64,8 +64,14 @@ } }, "services": { - "ac_start": "mdi:hvac", - "ac_cancel": "mdi:hvac-off", - "charge_set_schedules": "mdi:calendar-clock" + "ac_start": { + "service": "mdi:hvac" + }, + "ac_cancel": { + "service": "mdi:hvac-off" + }, + "charge_set_schedules": { + "service": "mdi:calendar-clock" + } } } diff --git a/homeassistant/components/renault/manifest.json b/homeassistant/components/renault/manifest.json index 6691921e850..716f2086bf1 100644 --- a/homeassistant/components/renault/manifest.json +++ b/homeassistant/components/renault/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["renault_api"], "quality_scale": "platinum", - "requirements": ["renault-api==0.2.5"] + "requirements": ["renault-api==0.2.7"] } diff --git a/homeassistant/components/renson/icons.json b/homeassistant/components/renson/icons.json index b7b1fdfdd8c..b558759a0dd 100644 --- a/homeassistant/components/renson/icons.json +++ b/homeassistant/components/renson/icons.json @@ -17,8 +17,14 @@ } }, "services": { - "set_timer_level": "mdi:timer", - "set_breeze": "mdi:weather-windy", - "set_pollution_settings": "mdi:air-filter" + "set_timer_level": { + "service": "mdi:timer" + }, + "set_breeze": { + "service": "mdi:weather-windy" + }, + "set_pollution_settings": { + "service": "mdi:air-filter" + } } } diff --git a/homeassistant/components/reolink/__init__.py b/homeassistant/components/reolink/__init__.py index cc293d970b2..f64c6bd9cf3 100644 --- a/homeassistant/components/reolink/__init__.py +++ b/homeassistant/components/reolink/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations import asyncio -from dataclasses import dataclass from datetime import timedelta import logging @@ -14,13 +13,20 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.typing import ConfigType from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from .const import DOMAIN from .exceptions import PasswordIncompatible, ReolinkException, UserNotAdmin from .host import ReolinkHost +from .services import async_setup_services +from .util import ReolinkData, get_device_uid_and_ch _LOGGER = logging.getLogger(__name__) @@ -40,14 +46,14 @@ DEVICE_UPDATE_INTERVAL = timedelta(seconds=60) FIRMWARE_UPDATE_INTERVAL = timedelta(hours=12) NUM_CRED_ERRORS = 3 +CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) -@dataclass -class ReolinkData: - """Data for the Reolink integration.""" - host: ReolinkHost - device_coordinator: DataUpdateCoordinator[None] - firmware_coordinator: DataUpdateCoordinator[None] +async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: + 
"""Set up Reolink shared code.""" + + async_setup_services(hass) + return True async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: @@ -188,6 +194,46 @@ async def async_remove_config_entry_device( host: ReolinkHost = hass.data[DOMAIN][config_entry.entry_id].host (device_uid, ch, is_chime) = get_device_uid_and_ch(device, host) + if is_chime: + await host.api.get_state(cmd="GetDingDongList") + chime = host.api.chime(ch) + if ( + chime is None + or chime.connect_state is None + or chime.connect_state < 0 + or chime.channel not in host.api.channels + ): + _LOGGER.debug( + "Removing Reolink chime %s with id %s, " + "since it is not coupled to %s anymore", + device.name, + ch, + host.api.nvr_name, + ) + return True + + # remove the chime from the host + await chime.remove() + await host.api.get_state(cmd="GetDingDongList") + if chime.connect_state < 0: + _LOGGER.debug( + "Removed Reolink chime %s with id %s from %s", + device.name, + ch, + host.api.nvr_name, + ) + return True + + _LOGGER.warning( + "Cannot remove Reolink chime %s with id %s, because it is still connected " + "to %s, please first remove the chime " + "in the reolink app", + device.name, + ch, + host.api.nvr_name, + ) + return False + if not host.api.is_nvr or ch is None: _LOGGER.warning( "Cannot remove Reolink device %s, because it is not a camera connected " @@ -225,28 +271,6 @@ async def async_remove_config_entry_device( return False -def get_device_uid_and_ch( - device: dr.DeviceEntry, host: ReolinkHost -) -> tuple[list[str], int | None, bool]: - """Get the channel and the split device_uid from a reolink DeviceEntry.""" - device_uid = [ - dev_id[1].split("_") for dev_id in device.identifiers if dev_id[0] == DOMAIN - ][0] - - is_chime = False - if len(device_uid) < 2: - # NVR itself - ch = None - elif device_uid[1].startswith("ch") and len(device_uid[1]) <= 5: - ch = int(device_uid[1][2:]) - elif device_uid[1].startswith("chime"): - ch = int(device_uid[1][5:]) - is_chime = True - else: - ch = host.api.channel_for_uid(device_uid[1]) - return (device_uid, ch, is_chime) - - def migrate_entity_ids( hass: HomeAssistant, config_entry_id: str, host: ReolinkHost ) -> None: diff --git a/homeassistant/components/reolink/entity.py b/homeassistant/components/reolink/entity.py index 053792ad667..c47822e125c 100644 --- a/homeassistant/components/reolink/entity.py +++ b/homeassistant/components/reolink/entity.py @@ -190,3 +190,8 @@ class ReolinkChimeCoordinatorEntity(ReolinkChannelCoordinatorEntity): serial_number=str(chime.dev_id), configuration_url=self._conf_url, ) + + @property + def available(self) -> bool: + """Return True if entity is available.""" + return self._chime.online and super().available diff --git a/homeassistant/components/reolink/icons.json b/homeassistant/components/reolink/icons.json index 7ca4c2d7f2b..f1c6f88a0f0 100644 --- a/homeassistant/components/reolink/icons.json +++ b/homeassistant/components/reolink/icons.json @@ -300,6 +300,11 @@ } }, "services": { - "ptz_move": "mdi:pan" + "ptz_move": { + "service": "mdi:pan" + }, + "play_chime": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/reolink/manifest.json b/homeassistant/components/reolink/manifest.json index 9671a4b4fc1..b90f7f4a045 100644 --- a/homeassistant/components/reolink/manifest.json +++ b/homeassistant/components/reolink/manifest.json @@ -18,5 +18,5 @@ "documentation": "https://www.home-assistant.io/integrations/reolink", "iot_class": "local_push", "loggers": ["reolink_aio"], - "requirements": 
["reolink-aio==0.9.7"] + "requirements": ["reolink-aio==0.9.8"] } diff --git a/homeassistant/components/reolink/media_source.py b/homeassistant/components/reolink/media_source.py index ae865b77913..3c5d60030a3 100644 --- a/homeassistant/components/reolink/media_source.py +++ b/homeassistant/components/reolink/media_source.py @@ -174,10 +174,7 @@ class ReolinkVODMediaSource(MediaSource): if len(ch_id) > 3: ch = host.api.channel_for_uid(ch_id) - if ( - host.api.api_version("recReplay", int(ch)) < 1 - or not host.api.hdd_info - ): + if not host.api.supported(int(ch), "replay") or not host.api.hdd_info: # playback stream not supported by this camera or no storage installed continue @@ -281,12 +278,16 @@ class ReolinkVODMediaSource(MediaSource): config_entry_id, channel, "sub" ) + title = host.api.camera_name(channel) + if host.api.model in DUAL_LENS_MODELS: + title = f"{host.api.camera_name(channel)} lens {channel}" + return BrowseMediaSource( domain=DOMAIN, identifier=f"RESs|{config_entry_id}|{channel}", media_class=MediaClass.CHANNEL, media_content_type=MediaType.PLAYLIST, - title=host.api.camera_name(channel), + title=title, can_play=False, can_expand=True, children=children, @@ -328,12 +329,16 @@ class ReolinkVODMediaSource(MediaSource): for day in status.days ] + title = f"{host.api.camera_name(channel)} {res_name(stream)}" + if host.api.model in DUAL_LENS_MODELS: + title = f"{host.api.camera_name(channel)} lens {channel} {res_name(stream)}" + return BrowseMediaSource( domain=DOMAIN, identifier=f"DAYS|{config_entry_id}|{channel}|{stream}", media_class=MediaClass.CHANNEL, media_content_type=MediaType.PLAYLIST, - title=f"{host.api.camera_name(channel)} {res_name(stream)}", + title=title, can_play=False, can_expand=True, children=children, @@ -388,12 +393,18 @@ class ReolinkVODMediaSource(MediaSource): ) ) + title = ( + f"{host.api.camera_name(channel)} {res_name(stream)} {year}/{month}/{day}" + ) + if host.api.model in DUAL_LENS_MODELS: + title = f"{host.api.camera_name(channel)} lens {channel} {res_name(stream)} {year}/{month}/{day}" + return BrowseMediaSource( domain=DOMAIN, identifier=f"FILES|{config_entry_id}|{channel}|{stream}", media_class=MediaClass.CHANNEL, media_content_type=MediaType.PLAYLIST, - title=f"{host.api.camera_name(channel)} {res_name(stream)} {year}/{month}/{day}", + title=title, can_play=False, can_expand=True, children=children, diff --git a/homeassistant/components/reolink/services.py b/homeassistant/components/reolink/services.py new file mode 100644 index 00000000000..d5cb402c74b --- /dev/null +++ b/homeassistant/components/reolink/services.py @@ -0,0 +1,80 @@ +"""Reolink additional services.""" + +from __future__ import annotations + +from reolink_aio.api import Chime +from reolink_aio.enums import ChimeToneEnum +from reolink_aio.exceptions import InvalidParameterError, ReolinkError +import voluptuous as vol + +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_DEVICE_ID +from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import device_registry as dr + +from .const import DOMAIN +from .host import ReolinkHost +from .util import get_device_uid_and_ch + +ATTR_RINGTONE = "ringtone" + + +@callback +def async_setup_services(hass: HomeAssistant) -> None: + """Set up Reolink services.""" + + async def async_play_chime(service_call: ServiceCall) -> None: + """Play a ringtone.""" + service_data = 
service_call.data + device_registry = dr.async_get(hass) + + for device_id in service_data[ATTR_DEVICE_ID]: + config_entry = None + device = device_registry.async_get(device_id) + if device is not None: + for entry_id in device.config_entries: + config_entry = hass.config_entries.async_get_entry(entry_id) + if config_entry is not None and config_entry.domain == DOMAIN: + break + if ( + config_entry is None + or device is None + or config_entry.state == ConfigEntryState.NOT_LOADED + ): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="service_entry_ex", + translation_placeholders={"service_name": "play_chime"}, + ) + host: ReolinkHost = hass.data[DOMAIN][config_entry.entry_id].host + (device_uid, chime_id, is_chime) = get_device_uid_and_ch(device, host) + chime: Chime | None = host.api.chime(chime_id) + if not is_chime or chime is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="service_not_chime", + translation_placeholders={"device_name": str(device.name)}, + ) + + ringtone = service_data[ATTR_RINGTONE] + try: + await chime.play(ChimeToneEnum[ringtone].value) + except InvalidParameterError as err: + raise ServiceValidationError(err) from err + except ReolinkError as err: + raise HomeAssistantError(err) from err + + hass.services.async_register( + DOMAIN, + "play_chime", + async_play_chime, + schema=vol.Schema( + { + vol.Required(ATTR_DEVICE_ID): list[str], + vol.Required(ATTR_RINGTONE): vol.In( + [method.name for method in ChimeToneEnum][1:] + ), + } + ), + ) diff --git a/homeassistant/components/reolink/services.yaml b/homeassistant/components/reolink/services.yaml index 42b9af34eb0..fe7fba9cdc7 100644 --- a/homeassistant/components/reolink/services.yaml +++ b/homeassistant/components/reolink/services.yaml @@ -16,3 +16,30 @@ ptz_move: min: 1 max: 64 step: 1 + +play_chime: + fields: + device_id: + required: true + selector: + device: + multiple: true + filter: + integration: reolink + model: "Reolink Chime" + ringtone: + required: true + selector: + select: + translation_key: ringtone + options: + - citybird + - originaltune + - pianokey + - loop + - attraction + - hophop + - goodday + - operetta + - moonlight + - waybackhome diff --git a/homeassistant/components/reolink/strings.json b/homeassistant/components/reolink/strings.json index cad09f71562..3710c3743fa 100644 --- a/homeassistant/components/reolink/strings.json +++ b/homeassistant/components/reolink/strings.json @@ -50,6 +50,14 @@ } } }, + "exceptions": { + "service_entry_ex": { + "message": "Reolink {service_name} error: config entry not found or not loaded" + }, + "service_not_chime": { + "message": "Reolink play_chime error: {device_name} is not a chime" + } + }, "issues": { "https_webhook": { "title": "Reolink webhook URL uses HTTPS (SSL)", @@ -86,6 +94,36 @@ "description": "PTZ move speed." } } + }, + "play_chime": { + "name": "Play chime", + "description": "Play a ringtone on a chime.", + "fields": { + "device_id": { + "name": "Target chime", + "description": "The chime to play the ringtone on." + }, + "ringtone": { + "name": "Ringtone", + "description": "Ringtone to play." 
+ } + } + } + }, + "selector": { + "ringtone": { + "options": { + "citybird": "[%key:component::reolink::entity::select::motion_tone::state::citybird%]", + "originaltune": "[%key:component::reolink::entity::select::motion_tone::state::originaltune%]", + "pianokey": "[%key:component::reolink::entity::select::motion_tone::state::pianokey%]", + "loop": "[%key:component::reolink::entity::select::motion_tone::state::loop%]", + "attraction": "[%key:component::reolink::entity::select::motion_tone::state::attraction%]", + "hophop": "[%key:component::reolink::entity::select::motion_tone::state::hophop%]", + "goodday": "[%key:component::reolink::entity::select::motion_tone::state::goodday%]", + "operetta": "[%key:component::reolink::entity::select::motion_tone::state::operetta%]", + "moonlight": "[%key:component::reolink::entity::select::motion_tone::state::moonlight%]", + "waybackhome": "[%key:component::reolink::entity::select::motion_tone::state::waybackhome%]" + } } }, "entity": { diff --git a/homeassistant/components/reolink/util.py b/homeassistant/components/reolink/util.py index cf4659224e3..305579e35cb 100644 --- a/homeassistant/components/reolink/util.py +++ b/homeassistant/components/reolink/util.py @@ -2,11 +2,24 @@ from __future__ import annotations +from dataclasses import dataclass + from homeassistant import config_entries from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator -from . import ReolinkData from .const import DOMAIN +from .host import ReolinkHost + + +@dataclass +class ReolinkData: + """Data for the Reolink integration.""" + + host: ReolinkHost + device_coordinator: DataUpdateCoordinator[None] + firmware_coordinator: DataUpdateCoordinator[None] def is_connected(hass: HomeAssistant, config_entry: config_entries.ConfigEntry) -> bool: @@ -19,3 +32,25 @@ def is_connected(hass: HomeAssistant, config_entry: config_entries.ConfigEntry) and config_entry.state == config_entries.ConfigEntryState.LOADED and reolink_data.device_coordinator.last_update_success ) + + +def get_device_uid_and_ch( + device: dr.DeviceEntry, host: ReolinkHost +) -> tuple[list[str], int | None, bool]: + """Get the channel and the split device_uid from a reolink DeviceEntry.""" + device_uid = [ + dev_id[1].split("_") for dev_id in device.identifiers if dev_id[0] == DOMAIN + ][0] + + is_chime = False + if len(device_uid) < 2: + # NVR itself + ch = None + elif device_uid[1].startswith("ch") and len(device_uid[1]) <= 5: + ch = int(device_uid[1][2:]) + elif device_uid[1].startswith("chime"): + ch = int(device_uid[1][5:]) + is_chime = True + else: + ch = host.api.channel_for_uid(device_uid[1]) + return (device_uid, ch, is_chime) diff --git a/homeassistant/components/rest/__init__.py b/homeassistant/components/rest/__init__.py index b7cdee2e039..59239ad6744 100644 --- a/homeassistant/components/rest/__init__.py +++ b/homeassistant/components/rest/__init__.py @@ -202,19 +202,14 @@ def create_rest_data_from_config(hass: HomeAssistant, config: ConfigType) -> Res timeout: int = config[CONF_TIMEOUT] encoding: str = config[CONF_ENCODING] if resource_template is not None: - resource_template.hass = hass resource = resource_template.async_render(parse_result=False) if payload_template is not None: - payload_template.hass = hass payload = payload_template.async_render(parse_result=False) if not resource: raise HomeAssistantError("Resource not set for RestData") - template.attach(hass, headers) - 
template.attach(hass, params) - auth: httpx.DigestAuth | tuple[str, str] | None = None if username and password: if config.get(CONF_AUTHENTICATION) == HTTP_DIGEST_AUTHENTICATION: diff --git a/homeassistant/components/rest/binary_sensor.py b/homeassistant/components/rest/binary_sensor.py index e8119a40f8c..c976506d1ba 100644 --- a/homeassistant/components/rest/binary_sensor.py +++ b/homeassistant/components/rest/binary_sensor.py @@ -133,8 +133,6 @@ class RestBinarySensor(ManualTriggerEntity, RestEntity, BinarySensorEntity): ) self._previous_data = None self._value_template: Template | None = config.get(CONF_VALUE_TEMPLATE) - if (value_template := self._value_template) is not None: - value_template.hass = hass @property def available(self) -> bool: diff --git a/homeassistant/components/rest/icons.json b/homeassistant/components/rest/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/rest/icons.json +++ b/homeassistant/components/rest/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/rest/notify.py b/homeassistant/components/rest/notify.py index c8314d18707..1ca3c55e2b2 100644 --- a/homeassistant/components/rest/notify.py +++ b/homeassistant/components/rest/notify.py @@ -172,7 +172,6 @@ class RestNotificationService(BaseNotificationService): } if not isinstance(value, Template): return value - value.hass = self._hass return value.async_render(kwargs, parse_result=False) if self._data: diff --git a/homeassistant/components/rest/sensor.py b/homeassistant/components/rest/sensor.py index d7bb0ea33fb..fc6ce8c6749 100644 --- a/homeassistant/components/rest/sensor.py +++ b/homeassistant/components/rest/sensor.py @@ -139,8 +139,6 @@ class RestSensor(ManualTriggerSensorEntity, RestEntity): config[CONF_FORCE_UPDATE], ) self._value_template = config.get(CONF_VALUE_TEMPLATE) - if (value_template := self._value_template) is not None: - value_template.hass = hass self._json_attrs = config.get(CONF_JSON_ATTRS) self._json_attrs_path = config.get(CONF_JSON_ATTRS_PATH) self._attr_extra_state_attributes = {} diff --git a/homeassistant/components/rest/switch.py b/homeassistant/components/rest/switch.py index d01aab2cf9f..e4bb1f797d9 100644 --- a/homeassistant/components/rest/switch.py +++ b/homeassistant/components/rest/switch.py @@ -151,14 +151,6 @@ class RestSwitch(ManualTriggerEntity, SwitchEntity): self._timeout: int = config[CONF_TIMEOUT] self._verify_ssl: bool = config[CONF_VERIFY_SSL] - self._body_on.hass = hass - self._body_off.hass = hass - if (is_on_template := self._is_on_template) is not None: - is_on_template.hass = hass - - template.attach(hass, self._headers) - template.attach(hass, self._params) - async def async_added_to_hass(self) -> None: """Handle adding to Home Assistant.""" await super().async_added_to_hass() diff --git a/homeassistant/components/rest_command/__init__.py b/homeassistant/components/rest_command/__init__.py index b6945c5ce98..ee93fde35fa 100644 --- a/homeassistant/components/rest_command/__init__.py +++ b/homeassistant/components/rest_command/__init__.py @@ -96,7 +96,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: method = command_config[CONF_METHOD] template_url = command_config[CONF_URL] - template_url.hass = hass auth = None if CONF_USERNAME in command_config: @@ -107,11 +106,8 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: template_payload = None if CONF_PAYLOAD in command_config: 
template_payload = command_config[CONF_PAYLOAD] - template_payload.hass = hass template_headers = command_config.get(CONF_HEADERS, {}) - for template_header in template_headers.values(): - template_header.hass = hass content_type = command_config.get(CONF_CONTENT_TYPE) diff --git a/homeassistant/components/rest_command/icons.json b/homeassistant/components/rest_command/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/rest_command/icons.json +++ b/homeassistant/components/rest_command/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/rflink/icons.json b/homeassistant/components/rflink/icons.json index 988b048eee7..de2942f44ac 100644 --- a/homeassistant/components/rflink/icons.json +++ b/homeassistant/components/rflink/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_command": "mdi:send" + "send_command": { + "service": "mdi:send" + } } } diff --git a/homeassistant/components/rfxtrx/icons.json b/homeassistant/components/rfxtrx/icons.json index c1b8e741e45..cbc48cf2105 100644 --- a/homeassistant/components/rfxtrx/icons.json +++ b/homeassistant/components/rfxtrx/icons.json @@ -1,5 +1,7 @@ { "services": { - "send": "mdi:send" + "send": { + "service": "mdi:send" + } } } diff --git a/homeassistant/components/ring/__init__.py b/homeassistant/components/ring/__init__.py index 36c66550ddc..14ab435fda6 100644 --- a/homeassistant/components/ring/__init__.py +++ b/homeassistant/components/ring/__init__.py @@ -3,7 +3,6 @@ from __future__ import annotations from dataclasses import dataclass -from functools import partial import logging from typing import Any, cast @@ -13,6 +12,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import APPLICATION_NAME, CONF_TOKEN, __version__ from homeassistant.core import HomeAssistant, ServiceCall, callback from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import DOMAIN, PLATFORMS @@ -35,17 +35,17 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up a config entry.""" def token_updater(token: dict[str, Any]) -> None: - """Handle from sync context when token is updated.""" - hass.loop.call_soon_threadsafe( - partial( - hass.config_entries.async_update_entry, - entry, - data={**entry.data, CONF_TOKEN: token}, - ) + """Handle from async context when token is updated.""" + hass.config_entries.async_update_entry( + entry, + data={**entry.data, CONF_TOKEN: token}, ) auth = Auth( - f"{APPLICATION_NAME}/{__version__}", entry.data[CONF_TOKEN], token_updater + f"{APPLICATION_NAME}/{__version__}", + entry.data[CONF_TOKEN], + token_updater, + http_client_session=async_get_clientsession(hass), ) ring = Ring(auth) diff --git a/homeassistant/components/ring/button.py b/homeassistant/components/ring/button.py index 15d56a8b7cf..c8d7d902d18 100644 --- a/homeassistant/components/ring/button.py +++ b/homeassistant/components/ring/button.py @@ -53,6 +53,6 @@ class RingDoorButton(RingEntity[RingOther], ButtonEntity): self._attr_unique_id = f"{device.id}-{description.key}" @exception_wrap - def press(self) -> None: + async def async_press(self) -> None: """Open the door.""" - self._device.open_door() + await self._device.async_open_door() diff --git a/homeassistant/components/ring/camera.py 
b/homeassistant/components/ring/camera.py index ba75b68434d..df71de29089 100644 --- a/homeassistant/components/ring/camera.py +++ b/homeassistant/components/ring/camera.py @@ -81,6 +81,8 @@ class RingCam(RingEntity[RingDoorBell], Camera): history_data = self._device.last_history if history_data: self._last_event = history_data[0] + # will call async_update to update the attributes and get the + # video url from the api self.async_schedule_update_ha_state(True) else: self._last_event = None @@ -159,36 +161,36 @@ class RingCam(RingEntity[RingDoorBell], Camera): if self._last_video_id != self._last_event["id"]: self._image = None - self._video_url = await self.hass.async_add_executor_job(self._get_video) + self._video_url = await self._async_get_video() self._last_video_id = self._last_event["id"] self._expires_at = FORCE_REFRESH_INTERVAL + utcnow @exception_wrap - def _get_video(self) -> str | None: + async def _async_get_video(self) -> str | None: if TYPE_CHECKING: # _last_event is set before calling update so will never be None assert self._last_event event_id = self._last_event.get("id") assert event_id and isinstance(event_id, int) - return self._device.recording_url(event_id) + return await self._device.async_recording_url(event_id) @exception_wrap - def _set_motion_detection_enabled(self, new_state: bool) -> None: + async def _async_set_motion_detection_enabled(self, new_state: bool) -> None: if not self._device.has_capability(MOTION_DETECTION_CAPABILITY): _LOGGER.error( "Entity %s does not have motion detection capability", self.entity_id ) return - self._device.motion_detection = new_state + await self._device.async_set_motion_detection(new_state) self._attr_motion_detection_enabled = new_state - self.schedule_update_ha_state(False) + self.async_write_ha_state() - def enable_motion_detection(self) -> None: + async def async_enable_motion_detection(self) -> None: """Enable motion detection in the camera.""" - self._set_motion_detection_enabled(True) + await self._async_set_motion_detection_enabled(True) - def disable_motion_detection(self) -> None: + async def async_disable_motion_detection(self) -> None: """Disable motion detection in camera.""" - self._set_motion_detection_enabled(False) + await self._async_set_motion_detection_enabled(False) diff --git a/homeassistant/components/ring/config_flow.py b/homeassistant/components/ring/config_flow.py index 6239105580d..b82b4f22223 100644 --- a/homeassistant/components/ring/config_flow.py +++ b/homeassistant/components/ring/config_flow.py @@ -17,6 +17,7 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.aiohttp_client import async_get_clientsession from .const import CONF_2FA, DOMAIN @@ -31,11 +32,13 @@ STEP_REAUTH_DATA_SCHEMA = vol.Schema({vol.Required(CONF_PASSWORD): str}) async def validate_input(hass: HomeAssistant, data: dict[str, str]) -> dict[str, Any]: """Validate the user input allows us to connect.""" - auth = Auth(f"{APPLICATION_NAME}/{ha_version}") + auth = Auth( + f"{APPLICATION_NAME}/{ha_version}", + http_client_session=async_get_clientsession(hass), + ) try: - token = await hass.async_add_executor_job( - auth.fetch_token, + token = await auth.async_fetch_token( data[CONF_USERNAME], data[CONF_PASSWORD], data.get(CONF_2FA), diff --git a/homeassistant/components/ring/coordinator.py b/homeassistant/components/ring/coordinator.py index 1a52fc78988..600743005eb 100644 --- 
a/homeassistant/components/ring/coordinator.py +++ b/homeassistant/components/ring/coordinator.py @@ -1,8 +1,9 @@ """Data coordinators for the ring integration.""" from asyncio import TaskGroup -from collections.abc import Callable +from collections.abc import Callable, Coroutine import logging +from typing import Any from ring_doorbell import AuthenticationError, Ring, RingDevices, RingError, RingTimeout @@ -16,10 +17,13 @@ _LOGGER = logging.getLogger(__name__) async def _call_api[*_Ts, _R]( - hass: HomeAssistant, target: Callable[[*_Ts], _R], *args: *_Ts, msg_suffix: str = "" + hass: HomeAssistant, + target: Callable[[*_Ts], Coroutine[Any, Any, _R]], + *args: *_Ts, + msg_suffix: str = "", ) -> _R: try: - return await hass.async_add_executor_job(target, *args) + return await target(*args) except AuthenticationError as err: # Raising ConfigEntryAuthFailed will cancel future updates # and start a config flow with SOURCE_REAUTH (async_step_reauth) @@ -52,7 +56,9 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]): async def _async_update_data(self) -> RingDevices: """Fetch data from API endpoint.""" - update_method: str = "update_data" if self.first_call else "update_devices" + update_method: str = ( + "async_update_data" if self.first_call else "async_update_devices" + ) await _call_api(self.hass, getattr(self.ring_api, update_method)) self.first_call = False devices: RingDevices = self.ring_api.devices() @@ -67,7 +73,7 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]): tg.create_task( _call_api( self.hass, - lambda device: device.history(limit=10), + lambda device: device.async_history(limit=10), device, msg_suffix=f" for device {device.name}", # device_id is the mac ) @@ -75,7 +81,7 @@ class RingDataCoordinator(DataUpdateCoordinator[RingDevices]): tg.create_task( _call_api( self.hass, - device.update_health_data, + device.async_update_health_data, msg_suffix=f" for device {device.name}", ) ) @@ -100,4 +106,4 @@ class RingNotificationsCoordinator(DataUpdateCoordinator[None]): async def _async_update_data(self) -> None: """Fetch data from API endpoint.""" - await _call_api(self.hass, self.ring_api.update_dings) + await _call_api(self.hass, self.ring_api.async_update_dings) diff --git a/homeassistant/components/ring/entity.py b/homeassistant/components/ring/entity.py index a4275815450..72deb09b76f 100644 --- a/homeassistant/components/ring/entity.py +++ b/homeassistant/components/ring/entity.py @@ -1,6 +1,6 @@ """Base class for Ring entity.""" -from collections.abc import Callable +from collections.abc import Callable, Coroutine from typing import Any, Concatenate, Generic, cast from ring_doorbell import ( @@ -29,25 +29,23 @@ _RingCoordinatorT = TypeVar( def exception_wrap[_RingBaseEntityT: RingBaseEntity[Any, Any], **_P, _R]( - func: Callable[Concatenate[_RingBaseEntityT, _P], _R], -) -> Callable[Concatenate[_RingBaseEntityT, _P], _R]: + async_func: Callable[Concatenate[_RingBaseEntityT, _P], Coroutine[Any, Any, _R]], +) -> Callable[Concatenate[_RingBaseEntityT, _P], Coroutine[Any, Any, _R]]: """Define a wrapper to catch exceptions and raise HomeAssistant errors.""" - def _wrap(self: _RingBaseEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R: + async def _wrap(self: _RingBaseEntityT, *args: _P.args, **kwargs: _P.kwargs) -> _R: try: - return func(self, *args, **kwargs) + return await async_func(self, *args, **kwargs) except AuthenticationError as err: - self.hass.loop.call_soon_threadsafe( - self.coordinator.config_entry.async_start_reauth, self.hass - ) + 
self.coordinator.config_entry.async_start_reauth(self.hass) raise HomeAssistantError(err) from err except RingTimeout as err: raise HomeAssistantError( - f"Timeout communicating with API {func}: {err}" + f"Timeout communicating with API {async_func}: {err}" ) from err except RingError as err: raise HomeAssistantError( - f"Error communicating with API{func}: {err}" + f"Error communicating with API{async_func}: {err}" ) from err return _wrap diff --git a/homeassistant/components/ring/icons.json b/homeassistant/components/ring/icons.json index 9dd31fd0fd1..5820fbf77c8 100644 --- a/homeassistant/components/ring/icons.json +++ b/homeassistant/components/ring/icons.json @@ -39,6 +39,8 @@ } }, "services": { - "update": "mdi:refresh" + "update": { + "service": "mdi:refresh" + } } } diff --git a/homeassistant/components/ring/light.py b/homeassistant/components/ring/light.py index 5747c9e77f7..99c4105f4e9 100644 --- a/homeassistant/components/ring/light.py +++ b/homeassistant/components/ring/light.py @@ -80,18 +80,18 @@ class RingLight(RingEntity[RingStickUpCam], LightEntity): super()._handle_coordinator_update() @exception_wrap - def _set_light(self, new_state: OnOffState) -> None: + async def _async_set_light(self, new_state: OnOffState) -> None: """Update light state, and causes Home Assistant to correctly update.""" - self._device.lights = new_state + await self._device.async_set_lights(new_state) self._attr_is_on = new_state == OnOffState.ON self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY - self.schedule_update_ha_state() + self.async_write_ha_state() - def turn_on(self, **kwargs: Any) -> None: + async def async_turn_on(self, **kwargs: Any) -> None: """Turn the light on for 30 seconds.""" - self._set_light(OnOffState.ON) + await self._async_set_light(OnOffState.ON) - def turn_off(self, **kwargs: Any) -> None: + async def async_turn_off(self, **kwargs: Any) -> None: """Turn the light off.""" - self._set_light(OnOffState.OFF) + await self._async_set_light(OnOffState.OFF) diff --git a/homeassistant/components/ring/manifest.json b/homeassistant/components/ring/manifest.json index a3d15bd711d..23e7b882efe 100644 --- a/homeassistant/components/ring/manifest.json +++ b/homeassistant/components/ring/manifest.json @@ -14,5 +14,5 @@ "iot_class": "cloud_polling", "loggers": ["ring_doorbell"], "quality_scale": "silver", - "requirements": ["ring-doorbell[listen]==0.8.12"] + "requirements": ["ring-doorbell[listen]==0.9.0"] } diff --git a/homeassistant/components/ring/siren.py b/homeassistant/components/ring/siren.py index f63f9d33182..665de07a5bb 100644 --- a/homeassistant/components/ring/siren.py +++ b/homeassistant/components/ring/siren.py @@ -47,8 +47,8 @@ class RingChimeSiren(RingEntity[RingChime], SirenEntity): self._attr_unique_id = f"{self._device.id}-siren" @exception_wrap - def turn_on(self, **kwargs: Any) -> None: + async def async_turn_on(self, **kwargs: Any) -> None: """Play the test sound on a Ring Chime device.""" tone = kwargs.get(ATTR_TONE) or RingEventKind.DING.value - self._device.test_sound(kind=tone) + await self._device.async_test_sound(kind=tone) diff --git a/homeassistant/components/ring/switch.py b/homeassistant/components/ring/switch.py index 0e032907bae..effb43cedbe 100644 --- a/homeassistant/components/ring/switch.py +++ b/homeassistant/components/ring/switch.py @@ -81,18 +81,18 @@ class SirenSwitch(BaseRingSwitch): super()._handle_coordinator_update() @exception_wrap - def _set_switch(self, new_state: int) -> None: + async def _async_set_switch(self, new_state: int) 
-> None: """Update switch state, and causes Home Assistant to correctly update.""" - self._device.siren = new_state + await self._device.async_set_siren(new_state) self._attr_is_on = new_state > 0 self._no_updates_until = dt_util.utcnow() + SKIP_UPDATES_DELAY - self.schedule_update_ha_state() + self.async_write_ha_state() - def turn_on(self, **kwargs: Any) -> None: + async def async_turn_on(self, **kwargs: Any) -> None: """Turn the siren on for 30 seconds.""" - self._set_switch(1) + await self._async_set_switch(1) - def turn_off(self, **kwargs: Any) -> None: + async def async_turn_off(self, **kwargs: Any) -> None: """Turn the siren off.""" - self._set_switch(0) + await self._async_set_switch(0) diff --git a/homeassistant/components/roborock/__init__.py b/homeassistant/components/roborock/__init__.py index 3743faa32d8..88a603eca2b 100644 --- a/homeassistant/components/roborock/__init__.py +++ b/homeassistant/components/roborock/__init__.py @@ -168,13 +168,15 @@ async def setup_device_v1( home_data_rooms: list[HomeDataRoom], ) -> RoborockDataUpdateCoordinator | None: """Set up a device Coordinator.""" - mqtt_client = RoborockMqttClientV1(user_data, DeviceData(device, product_info.name)) + mqtt_client = await hass.async_add_executor_job( + RoborockMqttClientV1, user_data, DeviceData(device, product_info.name) + ) try: networking = await mqtt_client.get_networking() if networking is None: # If the api does not return an error but does return None for # get_networking - then we need to go through cache checking. - raise RoborockException("Networking request returned None.") + raise RoborockException("Networking request returned None.") # noqa: TRY301 except RoborockException as err: _LOGGER.warning( "Not setting up %s because we could not get the network information of the device. 
" diff --git a/homeassistant/components/roborock/config_flow.py b/homeassistant/components/roborock/config_flow.py index 2b409bdf8c4..c6dee7ce4ed 100644 --- a/homeassistant/components/roborock/config_flow.py +++ b/homeassistant/components/roborock/config_flow.py @@ -12,6 +12,7 @@ from roborock.exceptions import ( RoborockException, RoborockInvalidCode, RoborockInvalidEmail, + RoborockTooFrequentCodeRequests, RoborockUrlException, ) from roborock.web_api import RoborockApiClient @@ -83,6 +84,8 @@ class RoborockFlowHandler(ConfigFlow, domain=DOMAIN): errors["base"] = "unknown_url" except RoborockInvalidEmail: errors["base"] = "invalid_email_format" + except RoborockTooFrequentCodeRequests: + errors["base"] = "too_frequent_code_requests" except RoborockException: _LOGGER.exception("Unexpected exception") errors["base"] = "unknown_roborock" diff --git a/homeassistant/components/roborock/icons.json b/homeassistant/components/roborock/icons.json index 6a615ab82a1..c7df6d35460 100644 --- a/homeassistant/components/roborock/icons.json +++ b/homeassistant/components/roborock/icons.json @@ -119,6 +119,8 @@ } }, "services": { - "get_maps": "mdi:floor-plan" + "get_maps": { + "service": "mdi:floor-plan" + } } } diff --git a/homeassistant/components/roborock/strings.json b/homeassistant/components/roborock/strings.json index 081e4c68a75..d1fc50f27e8 100644 --- a/homeassistant/components/roborock/strings.json +++ b/homeassistant/components/roborock/strings.json @@ -22,6 +22,7 @@ "invalid_code": "The code you entered was incorrect, please check it and try again.", "invalid_email": "There is no account associated with the email you entered, please try again.", "invalid_email_format": "There is an issue with the formatting of your email - please try again.", + "too_frequent_code_requests": "You have attempted to request too many codes. 
Try again later.", "unknown_roborock": "There was an unknown roborock exception - please check your logs.", "unknown_url": "There was an issue determining the correct url for your roborock account - please check your logs.", "unknown": "[%key:common::config_flow::error::unknown%]" diff --git a/homeassistant/components/roborock/vacuum.py b/homeassistant/components/roborock/vacuum.py index f7fc58161a8..81a10e26415 100644 --- a/homeassistant/components/roborock/vacuum.py +++ b/homeassistant/components/roborock/vacuum.py @@ -69,7 +69,7 @@ async def async_setup_entry( platform.async_register_entity_service( GET_MAPS_SERVICE_NAME, - {}, + None, RoborockVacuum.get_maps.__name__, supports_response=SupportsResponse.ONLY, ) diff --git a/homeassistant/components/roku/__init__.py b/homeassistant/components/roku/__init__.py index 0620207a8ee..7515f375054 100644 --- a/homeassistant/components/roku/__init__.py +++ b/homeassistant/components/roku/__init__.py @@ -5,13 +5,10 @@ from __future__ import annotations from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from .const import DOMAIN from .coordinator import RokuDataUpdateCoordinator -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [ Platform.BINARY_SENSOR, Platform.MEDIA_PLAYER, diff --git a/homeassistant/components/roku/icons.json b/homeassistant/components/roku/icons.json index 02e5d1e5698..355b5a715e5 100644 --- a/homeassistant/components/roku/icons.json +++ b/homeassistant/components/roku/icons.json @@ -32,6 +32,8 @@ } }, "services": { - "search": "mdi:magnify" + "search": { + "service": "mdi:magnify" + } } } diff --git a/homeassistant/components/roon/config_flow.py b/homeassistant/components/roon/config_flow.py index f555cc52dd1..de220454852 100644 --- a/homeassistant/components/roon/config_flow.py +++ b/homeassistant/components/roon/config_flow.py @@ -2,11 +2,12 @@ import asyncio import logging +from typing import Any from roonapi import RoonApi, RoonDiscovery import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -36,14 +37,14 @@ TIMEOUT = 120 class RoonHub: """Interact with roon during config flow.""" - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: """Initialise the RoonHub.""" self._hass = hass - async def discover(self): + async def discover(self) -> list[tuple[str, int]]: """Try and discover roon servers.""" - def get_discovered_servers(discovery): + def get_discovered_servers(discovery: RoonDiscovery) -> list[tuple[str, int]]: servers = discovery.all() discovery.stop() return servers @@ -93,7 +94,7 @@ class RoonHub: return (token, core_id, core_name) -async def discover(hass): +async def discover(hass: HomeAssistant) -> list[tuple[str, int]]: """Connect and authenticate home assistant.""" hub = RoonHub(hass) @@ -122,13 +123,15 @@ class RoonConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the Roon flow.""" self._host = None self._port = None - self._servers = [] + self._servers: list[tuple[str, int]] = [] - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: 
dict[str, Any] | None = None + ) -> ConfigFlowResult: """Get roon core details via discovery.""" self._servers = await discover(self.hass) diff --git a/homeassistant/components/roon/icons.json b/homeassistant/components/roon/icons.json index 571ca3f45a2..1e1dd42b765 100644 --- a/homeassistant/components/roon/icons.json +++ b/homeassistant/components/roon/icons.json @@ -1,5 +1,7 @@ { "services": { - "transfer": "mdi:monitor-multiple" + "transfer": { + "service": "mdi:monitor-multiple" + } } } diff --git a/homeassistant/components/route53/icons.json b/homeassistant/components/route53/icons.json index 30a854991f0..5afe13ce949 100644 --- a/homeassistant/components/route53/icons.json +++ b/homeassistant/components/route53/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_records": "mdi:database-refresh" + "update_records": { + "service": "mdi:database-refresh" + } } } diff --git a/homeassistant/components/rova/config_flow.py b/homeassistant/components/rova/config_flow.py index e5e3a31b8af..a28e6202466 100644 --- a/homeassistant/components/rova/config_flow.py +++ b/homeassistant/components/rova/config_flow.py @@ -60,11 +60,11 @@ class RovaConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import the yaml config.""" - zip_code = user_input[CONF_ZIP_CODE] - number = user_input[CONF_HOUSE_NUMBER] - suffix = user_input[CONF_HOUSE_NUMBER_SUFFIX] + zip_code = import_data[CONF_ZIP_CODE] + number = import_data[CONF_HOUSE_NUMBER] + suffix = import_data[CONF_HOUSE_NUMBER_SUFFIX] await self.async_set_unique_id(f"{zip_code}{number}{suffix}".strip()) self._abort_if_unique_id_configured() diff --git a/homeassistant/components/rss_feed_template/__init__.py b/homeassistant/components/rss_feed_template/__init__.py index debff5a6e96..89624c922e6 100644 --- a/homeassistant/components/rss_feed_template/__init__.py +++ b/homeassistant/components/rss_feed_template/__init__.py @@ -49,18 +49,8 @@ def setup(hass: HomeAssistant, config: ConfigType) -> bool: requires_auth: bool = feedconfig["requires_api_password"] - title: Template | None - if (title := feedconfig.get("title")) is not None: - title.hass = hass - items: list[dict[str, Template]] = feedconfig["items"] - for item in items: - if "title" in item: - item["title"].hass = hass - if "description" in item: - item["description"].hass = hass - - rss_view = RssView(url, requires_auth, title, items) + rss_view = RssView(url, requires_auth, feedconfig.get("title"), items) hass.http.register_view(rss_view) return True diff --git a/homeassistant/components/ruckus_unleashed/manifest.json b/homeassistant/components/ruckus_unleashed/manifest.json index edaf0aa95d2..039840efc14 100644 --- a/homeassistant/components/ruckus_unleashed/manifest.json +++ b/homeassistant/components/ruckus_unleashed/manifest.json @@ -6,6 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/ruckus_unleashed", "integration_type": "hub", "iot_class": "local_polling", - "loggers": ["aioruckus", "xmltodict"], - "requirements": ["aioruckus==0.34"] + "loggers": ["aioruckus"], + "requirements": ["aioruckus==0.41"] } diff --git a/homeassistant/components/russound_rio/__init__.py b/homeassistant/components/russound_rio/__init__.py index 1560a4cd332..8627c636ef2 100644 --- a/homeassistant/components/russound_rio/__init__.py +++ b/homeassistant/components/russound_rio/__init__.py @@ -7,8 +7,8 @@ from aiorussound 
import Russound from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT, Platform -from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryError +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import ConfigEntryNotReady from .const import CONNECT_TIMEOUT, RUSSOUND_RIO_EXCEPTIONS @@ -22,13 +22,28 @@ type RussoundConfigEntry = ConfigEntry[Russound] async def async_setup_entry(hass: HomeAssistant, entry: RussoundConfigEntry) -> bool: """Set up a config entry.""" - russ = Russound(hass.loop, entry.data[CONF_HOST], entry.data[CONF_PORT]) + host = entry.data[CONF_HOST] + port = entry.data[CONF_PORT] + russ = Russound(hass.loop, host, port) + + @callback + def is_connected_updated(connected: bool) -> None: + if connected: + _LOGGER.warning("Reconnected to controller at %s:%s", host, port) + else: + _LOGGER.warning( + "Disconnected from controller at %s:%s", + host, + port, + ) + + russ.add_connection_callback(is_connected_updated) try: async with asyncio.timeout(CONNECT_TIMEOUT): await russ.connect() except RUSSOUND_RIO_EXCEPTIONS as err: - raise ConfigEntryError(err) from err + raise ConfigEntryNotReady(f"Error while connecting to {host}:{port}") from err entry.runtime_data = russ diff --git a/homeassistant/components/russound_rio/config_flow.py b/homeassistant/components/russound_rio/config_flow.py index e25ac7dde2e..df173d29f61 100644 --- a/homeassistant/components/russound_rio/config_flow.py +++ b/homeassistant/components/russound_rio/config_flow.py @@ -80,13 +80,11 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Attempt to import the existing configuration.""" - self._async_abort_entries_match({CONF_HOST: import_config[CONF_HOST]}) - host = import_config[CONF_HOST] - port = import_config.get(CONF_PORT, 9621) + self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) + host = import_data[CONF_HOST] + port = import_data.get(CONF_PORT, 9621) # Connection logic is repeated here since this method will be removed in future releases russ = Russound(self.hass.loop, host, port) diff --git a/homeassistant/components/russound_rio/entity.py b/homeassistant/components/russound_rio/entity.py new file mode 100644 index 00000000000..0e4d5cf7dde --- /dev/null +++ b/homeassistant/components/russound_rio/entity.py @@ -0,0 +1,86 @@ +"""Base entity for Russound RIO integration.""" + +from collections.abc import Awaitable, Callable, Coroutine +from functools import wraps +from typing import Any, Concatenate + +from aiorussound import Controller + +from homeassistant.core import callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo +from homeassistant.helpers.entity import Entity + +from .const import DOMAIN, RUSSOUND_RIO_EXCEPTIONS + + +def command[_EntityT: RussoundBaseEntity, **_P]( + func: Callable[Concatenate[_EntityT, _P], Awaitable[None]], +) -> Callable[Concatenate[_EntityT, _P], Coroutine[Any, Any, None]]: + """Wrap async calls to raise on request error.""" + + @wraps(func) + async def decorator(self: _EntityT, *args: _P.args, **kwargs: _P.kwargs) -> None: + """Wrap all command methods.""" + try: + await func(self, *args, **kwargs) + 
except RUSSOUND_RIO_EXCEPTIONS as exc: + raise HomeAssistantError( + f"Error executing {func.__name__} on entity {self.entity_id}," + ) from exc + + return decorator + + +class RussoundBaseEntity(Entity): + """Russound Base Entity.""" + + _attr_has_entity_name = True + _attr_should_poll = False + + def __init__( + self, + controller: Controller, + ) -> None: + """Initialize the entity.""" + self._instance = controller.instance + self._controller = controller + self._primary_mac_address = ( + controller.mac_address or controller.parent_controller.mac_address + ) + self._device_identifier = ( + self._controller.mac_address + or f"{self._primary_mac_address}-{self._controller.controller_id}" + ) + self._attr_device_info = DeviceInfo( + configuration_url=f"http://{self._instance.host}", + # Use MAC address of Russound device as identifier + identifiers={(DOMAIN, self._device_identifier)}, + manufacturer="Russound", + name=controller.controller_type, + model=controller.controller_type, + sw_version=controller.firmware_version, + ) + if controller.parent_controller: + self._attr_device_info["via_device"] = ( + DOMAIN, + controller.parent_controller.mac_address, + ) + else: + self._attr_device_info["connections"] = { + (CONNECTION_NETWORK_MAC, controller.mac_address) + } + + @callback + def _is_connected_updated(self, connected: bool) -> None: + """Update the state when the device is ready to receive commands or is unavailable.""" + self._attr_available = connected + self.async_write_ha_state() + + async def async_added_to_hass(self) -> None: + """Register callbacks.""" + self._instance.add_connection_callback(self._is_connected_updated) + + async def async_will_remove_from_hass(self) -> None: + """Remove callbacks.""" + self._instance.remove_connection_callback(self._is_connected_updated) diff --git a/homeassistant/components/russound_rio/manifest.json b/homeassistant/components/russound_rio/manifest.json index 67a01239615..6c473d94874 100644 --- a/homeassistant/components/russound_rio/manifest.json +++ b/homeassistant/components/russound_rio/manifest.json @@ -6,5 +6,6 @@ "documentation": "https://www.home-assistant.io/integrations/russound_rio", "iot_class": "local_push", "loggers": ["aiorussound"], - "requirements": ["aiorussound==2.2.3"] + "quality_scale": "silver", + "requirements": ["aiorussound==2.3.2"] } diff --git a/homeassistant/components/russound_rio/media_player.py b/homeassistant/components/russound_rio/media_player.py index ff0d9e006c0..20aaf0f3c08 100644 --- a/homeassistant/components/russound_rio/media_player.py +++ b/homeassistant/components/russound_rio/media_player.py @@ -17,13 +17,13 @@ from homeassistant.config_entries import SOURCE_IMPORT from homeassistant.const import EVENT_HOMEASSISTANT_STOP from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from . 
import RussoundConfigEntry from .const import DOMAIN, MP_FEATURES_BY_FLAG +from .entity import RussoundBaseEntity, command _LOGGER = logging.getLogger(__name__) @@ -107,13 +107,11 @@ async def async_setup_entry( async_add_entities(entities) -class RussoundZoneDevice(MediaPlayerEntity): +class RussoundZoneDevice(RussoundBaseEntity, MediaPlayerEntity): """Representation of a Russound Zone.""" _attr_device_class = MediaPlayerDeviceClass.SPEAKER _attr_media_content_type = MediaType.MUSIC - _attr_should_poll = False - _attr_has_entity_name = True _attr_supported_features = ( MediaPlayerEntityFeature.VOLUME_SET | MediaPlayerEntityFeature.VOLUME_STEP @@ -124,36 +122,11 @@ class RussoundZoneDevice(MediaPlayerEntity): def __init__(self, zone: Zone, sources: dict[int, Source]) -> None: """Initialize the zone device.""" - self._controller = zone.controller + super().__init__(zone.controller) self._zone = zone self._sources = sources self._attr_name = zone.name - primary_mac_address = ( - self._controller.mac_address - or self._controller.parent_controller.mac_address - ) - self._attr_unique_id = f"{primary_mac_address}-{zone.device_str()}" - device_identifier = ( - self._controller.mac_address - or f"{primary_mac_address}-{self._controller.controller_id}" - ) - self._attr_device_info = DeviceInfo( - # Use MAC address of Russound device as identifier - identifiers={(DOMAIN, device_identifier)}, - manufacturer="Russound", - name=self._controller.controller_type, - model=self._controller.controller_type, - sw_version=self._controller.firmware_version, - ) - if self._controller.parent_controller: - self._attr_device_info["via_device"] = ( - DOMAIN, - self._controller.parent_controller.mac_address, - ) - else: - self._attr_device_info["connections"] = { - (CONNECTION_NETWORK_MAC, self._controller.mac_address) - } + self._attr_unique_id = f"{self._primary_mac_address}-{zone.device_str()}" for flag, feature in MP_FEATURES_BY_FLAG.items(): if flag in zone.instance.supported_features: self._attr_supported_features |= feature @@ -167,8 +140,14 @@ class RussoundZoneDevice(MediaPlayerEntity): async def async_added_to_hass(self) -> None: """Register callback handlers.""" + await super().async_added_to_hass() self._zone.add_callback(self._callback_handler) + async def async_will_remove_from_hass(self) -> None: + """Remove callbacks.""" + await super().async_will_remove_from_hass() + self._zone.remove_callback(self._callback_handler) + def _current_source(self) -> Source: return self._zone.fetch_current_source() @@ -221,19 +200,23 @@ class RussoundZoneDevice(MediaPlayerEntity): """ return float(self._zone.volume or "0") / 50.0 + @command async def async_turn_off(self) -> None: """Turn off the zone.""" await self._zone.zone_off() + @command async def async_turn_on(self) -> None: """Turn on the zone.""" await self._zone.zone_on() + @command async def async_set_volume_level(self, volume: float) -> None: """Set the volume level.""" rvol = int(volume * 50.0) await self._zone.set_volume(rvol) + @command async def async_select_source(self, source: str) -> None: """Select the source input for this zone.""" for source_id, src in self._sources.items(): @@ -242,10 +225,12 @@ class RussoundZoneDevice(MediaPlayerEntity): await self._zone.select_source(source_id) break + @command async def async_volume_up(self) -> None: """Step the volume up.""" await self._zone.volume_up() + @command async def async_volume_down(self) -> None: """Step the volume down.""" await self._zone.volume_down() diff --git 
a/homeassistant/components/sabnzbd/config_flow.py b/homeassistant/components/sabnzbd/config_flow.py index 944c3f2936c..2637659e91a 100644 --- a/homeassistant/components/sabnzbd/config_flow.py +++ b/homeassistant/components/sabnzbd/config_flow.py @@ -65,7 +65,7 @@ class SABnzbdConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_data): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import sabnzbd config from configuration.yaml.""" protocol = "https://" if import_data[CONF_SSL] else "http://" import_data[CONF_URL] = ( diff --git a/homeassistant/components/sabnzbd/icons.json b/homeassistant/components/sabnzbd/icons.json index a693e9fec86..ca4f4d584ae 100644 --- a/homeassistant/components/sabnzbd/icons.json +++ b/homeassistant/components/sabnzbd/icons.json @@ -1,7 +1,13 @@ { "services": { - "pause": "mdi:pause", - "resume": "mdi:play", - "set_speed": "mdi:speedometer" + "pause": { + "service": "mdi:pause" + }, + "resume": { + "service": "mdi:play" + }, + "set_speed": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/samsungtv/__init__.py b/homeassistant/components/samsungtv/__init__.py index 992c86d5d7e..f3b967a485e 100644 --- a/homeassistant/components/samsungtv/__init__.py +++ b/homeassistant/components/samsungtv/__init__.py @@ -23,11 +23,7 @@ from homeassistant.const import ( ) from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady -from homeassistant.helpers import ( - config_validation as cv, - device_registry as dr, - entity_registry as er, -) +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.debounce import Debouncer from .bridge import ( @@ -53,7 +49,6 @@ from .coordinator import SamsungTVDataUpdateCoordinator PLATFORMS = [Platform.MEDIA_PLAYER, Platform.REMOTE] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) SamsungTVConfigEntry = ConfigEntry[SamsungTVDataUpdateCoordinator] diff --git a/homeassistant/components/scene/icons.json b/homeassistant/components/scene/icons.json index 563c0f31ddc..b08d06fb434 100644 --- a/homeassistant/components/scene/icons.json +++ b/homeassistant/components/scene/icons.json @@ -5,10 +5,20 @@ } }, "services": { - "turn_on": "mdi:power", - "reload": "mdi:reload", - "apply": "mdi:check", - "create": "mdi:plus", - "delete": "mdi:delete" + "turn_on": { + "service": "mdi:power" + }, + "reload": { + "service": "mdi:reload" + }, + "apply": { + "service": "mdi:check" + }, + "create": { + "service": "mdi:plus" + }, + "delete": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/schedule/icons.json b/homeassistant/components/schedule/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/schedule/icons.json +++ b/homeassistant/components/schedule/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/scrape/sensor.py b/homeassistant/components/scrape/sensor.py index ceaf1e63a9d..dd84767ad41 100644 --- a/homeassistant/components/scrape/sensor.py +++ b/homeassistant/components/scrape/sensor.py @@ -67,10 +67,6 @@ async def async_setup_platform( entities: list[ScrapeSensor] = [] for sensor_config in sensors_config: - value_template: Template | None = sensor_config.get(CONF_VALUE_TEMPLATE) - if value_template is not None: - value_template.hass = hass - 
trigger_entity_config = {CONF_NAME: sensor_config[CONF_NAME]} for key in TRIGGER_ENTITY_OPTIONS: if key not in sensor_config: @@ -85,7 +81,7 @@ async def async_setup_platform( sensor_config[CONF_SELECT], sensor_config.get(CONF_ATTRIBUTE), sensor_config[CONF_INDEX], - value_template, + sensor_config.get(CONF_VALUE_TEMPLATE), True, ) ) diff --git a/homeassistant/components/screenlogic/icons.json b/homeassistant/components/screenlogic/icons.json index d8d021c20e6..ef8dc46f61d 100644 --- a/homeassistant/components/screenlogic/icons.json +++ b/homeassistant/components/screenlogic/icons.json @@ -1,7 +1,13 @@ { "services": { - "set_color_mode": "mdi:palette", - "start_super_chlorination": "mdi:pool", - "stop_super_chlorination": "mdi:pool" + "set_color_mode": { + "service": "mdi:palette" + }, + "start_super_chlorination": { + "service": "mdi:pool" + }, + "stop_super_chlorination": { + "service": "mdi:pool" + } } } diff --git a/homeassistant/components/script/icons.json b/homeassistant/components/script/icons.json index d253d0fd829..7e160941c05 100644 --- a/homeassistant/components/script/icons.json +++ b/homeassistant/components/script/icons.json @@ -8,9 +8,17 @@ } }, "services": { - "reload": "mdi:reload", - "turn_on": "mdi:script-text-play", - "turn_off": "mdi:script-text", - "toggle": "mdi:script-text" + "reload": { + "service": "mdi:reload" + }, + "turn_on": { + "service": "mdi:script-text-play" + }, + "turn_off": { + "service": "mdi:script-text" + }, + "toggle": { + "service": "mdi:script-text" + } } } diff --git a/homeassistant/components/select/__init__.py b/homeassistant/components/select/__init__.py index 27d41dafcd1..24f7d8bffea 100644 --- a/homeassistant/components/select/__init__.py +++ b/homeassistant/components/select/__init__.py @@ -66,13 +66,13 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_SELECT_FIRST, - {}, + None, SelectEntity.async_first.__name__, ) component.async_register_entity_service( SERVICE_SELECT_LAST, - {}, + None, SelectEntity.async_last.__name__, ) diff --git a/homeassistant/components/select/icons.json b/homeassistant/components/select/icons.json index 1b440d2a1de..fbd1d4568f1 100644 --- a/homeassistant/components/select/icons.json +++ b/homeassistant/components/select/icons.json @@ -5,10 +5,20 @@ } }, "services": { - "select_first": "mdi:format-list-bulleted", - "select_last": "mdi:format-list-bulleted", - "select_next": "mdi:format-list-bulleted", - "select_option": "mdi:format-list-bulleted", - "select_previous": "mdi:format-list-bulleted" + "select_first": { + "service": "mdi:format-list-bulleted" + }, + "select_last": { + "service": "mdi:format-list-bulleted" + }, + "select_next": { + "service": "mdi:format-list-bulleted" + }, + "select_option": { + "service": "mdi:format-list-bulleted" + }, + "select_previous": { + "service": "mdi:format-list-bulleted" + } } } diff --git a/homeassistant/components/sense/config_flow.py b/homeassistant/components/sense/config_flow.py index dab80b99e1a..222c6b30f79 100644 --- a/homeassistant/components/sense/config_flow.py +++ b/homeassistant/components/sense/config_flow.py @@ -3,7 +3,7 @@ from collections.abc import Mapping from functools import partial import logging -from typing import Any +from typing import TYPE_CHECKING, Any from sense_energy import ( ASyncSenseable, @@ -34,13 +34,12 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Init Config .""" - self._gateway = None - 
self._auth_data = {} - super().__init__() + self._gateway: ASyncSenseable | None = None + self._auth_data: dict[str, Any] = {} - async def validate_input(self, data): + async def validate_input(self, data: Mapping[str, Any]) -> None: """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. @@ -59,6 +58,8 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): client_session=client_session, ) ) + if TYPE_CHECKING: + assert self._gateway self._gateway.rate_limit = ACTIVE_UPDATE_RATE await self._gateway.authenticate( self._auth_data[CONF_EMAIL], self._auth_data[CONF_PASSWORD] @@ -79,7 +80,9 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_update_reload_and_abort(existing_entry, data=self._auth_data) - async def validate_input_and_create_entry(self, user_input, errors): + async def validate_input_and_create_entry( + self, user_input: Mapping[str, Any], errors: dict[str, str] + ) -> ConfigFlowResult | None: """Validate the input and create the entry from the data.""" try: await self.validate_input(user_input) @@ -118,9 +121,11 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: if result := await self.validate_input_and_create_entry(user_input, errors): return result @@ -136,9 +141,11 @@ class SenseConfigFlow(ConfigFlow, domain=DOMAIN): self._auth_data = dict(entry_data) return await self.async_step_reauth_validate(entry_data) - async def async_step_reauth_validate(self, user_input=None): + async def async_step_reauth_validate( + self, user_input: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle reauth and validation.""" - errors = {} + errors: dict[str, str] = {} if user_input is not None: if result := await self.validate_input_and_create_entry(user_input, errors): return result diff --git a/homeassistant/components/sensibo/icons.json b/homeassistant/components/sensibo/icons.json index e26840e48eb..ccab3c198d2 100644 --- a/homeassistant/components/sensibo/icons.json +++ b/homeassistant/components/sensibo/icons.json @@ -45,10 +45,20 @@ } }, "services": { - "assume_state": "mdi:shape-outline", - "enable_timer": "mdi:timer-play", - "enable_pure_boost": "mdi:air-filter", - "full_state": "mdi:shape", - "enable_climate_react": "mdi:wizard-hat" + "assume_state": { + "service": "mdi:shape-outline" + }, + "enable_timer": { + "service": "mdi:timer-play" + }, + "enable_pure_boost": { + "service": "mdi:air-filter" + }, + "full_state": { + "service": "mdi:shape" + }, + "enable_climate_react": { + "service": "mdi:wizard-hat" + } } } diff --git a/homeassistant/components/sensibo/manifest.json b/homeassistant/components/sensibo/manifest.json index 5a195a8a4cc..610695aaf7b 100644 --- a/homeassistant/components/sensibo/manifest.json +++ b/homeassistant/components/sensibo/manifest.json @@ -15,5 +15,5 @@ "iot_class": "cloud_polling", "loggers": ["pysensibo"], "quality_scale": "platinum", - "requirements": ["pysensibo==1.0.36"] + "requirements": ["pysensibo==1.1.0"] } diff --git a/homeassistant/components/sensibo/sensor.py b/homeassistant/components/sensibo/sensor.py index 16adfd5afe3..a6a70ea6c49 100644 --- a/homeassistant/components/sensibo/sensor.py +++ b/homeassistant/components/sensibo/sensor.py @@ -7,7 +7,7 @@ from dataclasses import dataclass from 
datetime import datetime from typing import TYPE_CHECKING, Any -from pysensibo.model import MotionSensor, SensiboDevice +from pysensibo.model import MotionSensor, PureAQI, SensiboDevice from homeassistant.components.sensor import ( SensorDeviceClass, @@ -97,11 +97,11 @@ MOTION_SENSOR_TYPES: tuple[SensiboMotionSensorEntityDescription, ...] = ( PURE_SENSOR_TYPES: tuple[SensiboDeviceSensorEntityDescription, ...] = ( SensiboDeviceSensorEntityDescription( key="pm25", - device_class=SensorDeviceClass.PM25, - native_unit_of_measurement=CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, - state_class=SensorStateClass.MEASUREMENT, - value_fn=lambda data: data.pm25, + translation_key="pm25_pure", + device_class=SensorDeviceClass.ENUM, + value_fn=lambda data: data.pm25_pure.name.lower() if data.pm25_pure else None, extra_fn=None, + options=[aqi.name.lower() for aqi in PureAQI], ), SensiboDeviceSensorEntityDescription( key="pure_sensitivity", diff --git a/homeassistant/components/sensibo/strings.json b/homeassistant/components/sensibo/strings.json index d93c2a54adb..60a32028017 100644 --- a/homeassistant/components/sensibo/strings.json +++ b/homeassistant/components/sensibo/strings.json @@ -110,6 +110,14 @@ "s": "Sensitive" } }, + "pm25_pure": { + "name": "Pure AQI", + "state": { + "good": "Good", + "moderate": "Moderate", + "bad": "Bad" + } + }, "timer_time": { "name": "Timer end time" }, diff --git a/homeassistant/components/sensor/const.py b/homeassistant/components/sensor/const.py index 5acf2ecef23..8f63e346caf 100644 --- a/homeassistant/components/sensor/const.py +++ b/homeassistant/components/sensor/const.py @@ -14,7 +14,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, UnitOfApparentPower, @@ -31,6 +30,7 @@ from homeassistant.const import ( UnitOfPower, UnitOfPrecipitationDepth, UnitOfPressure, + UnitOfReactivePower, UnitOfSoundPressure, UnitOfSpeed, UnitOfTemperature, @@ -563,7 +563,7 @@ DEVICE_CLASS_UNITS: dict[SensorDeviceClass, set[type[StrEnum] | str | None]] = { SensorDeviceClass.PRECIPITATION: set(UnitOfPrecipitationDepth), SensorDeviceClass.PRECIPITATION_INTENSITY: set(UnitOfVolumetricFlux), SensorDeviceClass.PRESSURE: set(UnitOfPressure), - SensorDeviceClass.REACTIVE_POWER: {POWER_VOLT_AMPERE_REACTIVE}, + SensorDeviceClass.REACTIVE_POWER: {UnitOfReactivePower.VOLT_AMPERE_REACTIVE}, SensorDeviceClass.SIGNAL_STRENGTH: { SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, diff --git a/homeassistant/components/sensor/icons.json b/homeassistant/components/sensor/icons.json index f23826cfe95..6132fcbc1e9 100644 --- a/homeassistant/components/sensor/icons.json +++ b/homeassistant/components/sensor/icons.json @@ -18,6 +18,9 @@ "carbon_monoxide": { "default": "mdi:molecule-co" }, + "conductivity": { + "default": "mdi:sprout-outline" + }, "current": { "default": "mdi:current-ac" }, diff --git a/homeassistant/components/sensor/recorder.py b/homeassistant/components/sensor/recorder.py index c02c3ce7b7a..fce41a13ca6 100644 --- a/homeassistant/components/sensor/recorder.py +++ b/homeassistant/components/sensor/recorder.py @@ -33,6 +33,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant, State, split_entity_id from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers.entity import entity_sources +from homeassistant.helpers.typing import UNDEFINED, UndefinedType from homeassistant.loader import 
async_suggest_report_issue from homeassistant.util import dt as dt_util from homeassistant.util.enum import try_parse_enum @@ -220,13 +221,13 @@ def _normalize_states( LINK_DEV_STATISTICS, ) return None, [] - state_unit = fstates[0][1].attributes.get(ATTR_UNIT_OF_MEASUREMENT) + return state_unit, fstates converter = statistics.STATISTIC_UNIT_TO_UNIT_CONVERTER[statistics_unit] valid_fstates: list[tuple[float, State]] = [] convert: Callable[[float], float] | None = None - last_unit: str | None | object = object() + last_unit: str | None | UndefinedType = UNDEFINED valid_units = converter.VALID_UNITS for fstate, state in fstates: @@ -640,32 +641,31 @@ def list_statistic_ids( result: dict[str, StatisticMetaData] = {} for state in entities: - state_class = state.attributes[ATTR_STATE_CLASS] - state_unit = state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) + entity_id = state.entity_id + if statistic_ids is not None and entity_id not in statistic_ids: + continue + attributes = state.attributes + state_class = attributes[ATTR_STATE_CLASS] provided_statistics = DEFAULT_STATISTICS[state_class] if statistic_type is not None and statistic_type not in provided_statistics: continue - if statistic_ids is not None and state.entity_id not in statistic_ids: - continue - if ( - "sum" in provided_statistics - and ATTR_LAST_RESET not in state.attributes - and state.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT + (has_sum := "sum" in provided_statistics) + and ATTR_LAST_RESET not in attributes + and state_class == SensorStateClass.MEASUREMENT ): continue - result[state.entity_id] = { + result[entity_id] = { "has_mean": "mean" in provided_statistics, - "has_sum": "sum" in provided_statistics, + "has_sum": has_sum, "name": None, "source": RECORDER_DOMAIN, - "statistic_id": state.entity_id, - "unit_of_measurement": state_unit, + "statistic_id": entity_id, + "unit_of_measurement": attributes.get(ATTR_UNIT_OF_MEASUREMENT), } - continue return result diff --git a/homeassistant/components/sentry/__init__.py b/homeassistant/components/sentry/__init__.py index 8c042621db6..904d493a863 100644 --- a/homeassistant/components/sentry/__init__.py +++ b/homeassistant/components/sentry/__init__.py @@ -17,7 +17,7 @@ from homeassistant.const import ( __version__ as current_version, ) from homeassistant.core import HomeAssistant, get_release_channel -from homeassistant.helpers import config_validation as cv, entity_platform, instance_id +from homeassistant.helpers import entity_platform, instance_id from homeassistant.helpers.event import async_call_later from homeassistant.helpers.system_info import async_get_system_info from homeassistant.loader import Integration, async_get_custom_components @@ -36,12 +36,9 @@ from .const import ( DEFAULT_LOGGING_EVENT_LEVEL, DEFAULT_LOGGING_LEVEL, DEFAULT_TRACING_SAMPLE_RATE, - DOMAIN, ENTITY_COMPONENTS, ) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - LOGGER_INFO_REGEX = re.compile(r"^(\w+)\.?(\w+)?\.?(\w+)?\.?(\w+)?(?:\..*)?$") diff --git a/homeassistant/components/serial/sensor.py b/homeassistant/components/serial/sensor.py index e3fee36c09e..e7c39d97f6a 100644 --- a/homeassistant/components/serial/sensor.py +++ b/homeassistant/components/serial/sensor.py @@ -93,9 +93,7 @@ async def async_setup_platform( xonxoff = config.get(CONF_XONXOFF) rtscts = config.get(CONF_RTSCTS) dsrdtr = config.get(CONF_DSRDTR) - - if (value_template := config.get(CONF_VALUE_TEMPLATE)) is not None: - value_template.hass = hass + value_template = config.get(CONF_VALUE_TEMPLATE) sensor 
= SerialSensor( name, diff --git a/homeassistant/components/seventeentrack/icons.json b/homeassistant/components/seventeentrack/icons.json index 78ca65edc4d..94ca8cd535a 100644 --- a/homeassistant/components/seventeentrack/icons.json +++ b/homeassistant/components/seventeentrack/icons.json @@ -28,6 +28,8 @@ } }, "services": { - "get_packages": "mdi:package" + "get_packages": { + "service": "mdi:package" + } } } diff --git a/homeassistant/components/seventeentrack/manifest.json b/homeassistant/components/seventeentrack/manifest.json index c4be80ca506..a130fbe9aee 100644 --- a/homeassistant/components/seventeentrack/manifest.json +++ b/homeassistant/components/seventeentrack/manifest.json @@ -7,5 +7,5 @@ "integration_type": "service", "iot_class": "cloud_polling", "loggers": ["pyseventeentrack"], - "requirements": ["pyseventeentrack==1.0.0"] + "requirements": ["pyseventeentrack==1.0.1"] } diff --git a/homeassistant/components/sharkiq/config_flow.py b/homeassistant/components/sharkiq/config_flow.py index 492b8f2a365..87367fcf093 100644 --- a/homeassistant/components/sharkiq/config_flow.py +++ b/homeassistant/components/sharkiq/config_flow.py @@ -116,9 +116,15 @@ class SharkIqConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-auth if login is invalid.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by reauthentication.""" errors: dict[str, str] = {} if user_input is not None: @@ -134,7 +140,7 @@ class SharkIqConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_abort(reason=errors["base"]) return self.async_show_form( - step_id="reauth", + step_id="reauth_confirm", data_schema=SHARKIQ_SCHEMA, errors=errors, ) diff --git a/homeassistant/components/sharkiq/icons.json b/homeassistant/components/sharkiq/icons.json index 13fd58ce66d..e58a317f503 100644 --- a/homeassistant/components/sharkiq/icons.json +++ b/homeassistant/components/sharkiq/icons.json @@ -1,5 +1,7 @@ { "services": { - "clean_room": "mdi:robot-vacuum" + "clean_room": { + "service": "mdi:robot-vacuum" + } } } diff --git a/homeassistant/components/sharkiq/strings.json b/homeassistant/components/sharkiq/strings.json index 63d4f6af48b..40b569e13b7 100644 --- a/homeassistant/components/sharkiq/strings.json +++ b/homeassistant/components/sharkiq/strings.json @@ -13,7 +13,7 @@ "region": "Shark IQ uses different services in the EU. Select your region to connect to the correct service for your account." 
} }, - "reauth": { + "reauth_confirm": { "data": { "username": "[%key:common::config_flow::data::username%]", "password": "[%key:common::config_flow::data::password%]", diff --git a/homeassistant/components/shelly/__init__.py b/homeassistant/components/shelly/__init__.py index 994b1ed0430..e0d9d17d55d 100644 --- a/homeassistant/components/shelly/__init__.py +++ b/homeassistant/components/shelly/__init__.py @@ -21,6 +21,7 @@ from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import ( config_validation as cv, device_registry as dr, + entity_registry as er, issue_registry as ir, ) from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -192,8 +193,15 @@ async def _async_setup_block_entry( await hass.config_entries.async_forward_entry_setups( entry, runtime_data.platforms ) - elif sleep_period is None or device_entry is None: + elif ( + sleep_period is None + or device_entry is None + or not er.async_entries_for_device(er.async_get(hass), device_entry.id) + ): # Need to get sleep info or first time sleeping device setup, wait for device + # If there are no entities for the device, it means we added the device, but + # Home Assistant was restarted before the device was online. In this case we + # cannot restore the entities, so we need to wait for the device to be online. LOGGER.debug( "Setup for device %s will resume when device is online", entry.title ) @@ -268,13 +276,25 @@ async def _async_setup_rpc_entry(hass: HomeAssistant, entry: ShellyConfigEntry) await hass.config_entries.async_forward_entry_setups( entry, runtime_data.platforms ) - elif sleep_period is None or device_entry is None: + elif ( + sleep_period is None + or device_entry is None + or not er.async_entries_for_device(er.async_get(hass), device_entry.id) + ): # Need to get sleep info or first time sleeping device setup, wait for device + # If there are no entities for the device, it means we added the device, but + # Home Assistant was restarted before the device was online. In this case we + # cannot restore the entities, so we need to wait for the device to be online. 
LOGGER.debug( "Setup for device %s will resume when device is online", entry.title ) runtime_data.rpc = ShellyRpcCoordinator(hass, entry, device) runtime_data.rpc.async_setup(runtime_data.platforms) + # Try to connect to the device, if we reached here from config flow + # and user woke up the device when adding it, we can continue setup + # otherwise we will wait for the device to wake up + if sleep_period: + await runtime_data.rpc.async_device_online("setup") else: # Restore sensors for sleeping device LOGGER.debug("Setting up offline RPC device %s", entry.title) diff --git a/homeassistant/components/shelly/const.py b/homeassistant/components/shelly/const.py index 1759f4bdd18..fe4108a1f52 100644 --- a/homeassistant/components/shelly/const.py +++ b/homeassistant/components/shelly/const.py @@ -254,3 +254,6 @@ VIRTUAL_NUMBER_MODE_MAP = { "field": NumberMode.BOX, "slider": NumberMode.SLIDER, } + + +API_WS_URL = "/api/shelly/ws" diff --git a/homeassistant/components/shelly/coordinator.py b/homeassistant/components/shelly/coordinator.py index 6286e515727..c8e6cc03a06 100644 --- a/homeassistant/components/shelly/coordinator.py +++ b/homeassistant/components/shelly/coordinator.py @@ -6,6 +6,7 @@ import asyncio from collections.abc import Callable, Coroutine from dataclasses import dataclass from datetime import timedelta +from functools import cached_property from typing import Any, cast from aioshelly.ble import async_ensure_ble_enabled, async_stop_scanner @@ -64,6 +65,7 @@ from .utils import ( get_host, get_http_port, get_rpc_device_wakeup_period, + get_rpc_ws_url, update_device_fw_info, ) @@ -101,6 +103,9 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( self._pending_platforms: list[Platform] | None = None device_name = device.name if device.initialized else entry.title interval_td = timedelta(seconds=update_interval) + # The device has come online at least once. In the case of a sleeping RPC + # device, this means that the device has connected to the WS server at least once. 
+ self._came_online_once = False super().__init__(hass, LOGGER, name=device_name, update_interval=interval_td) self._debounced_reload: Debouncer[Coroutine[Any, Any, None]] = Debouncer( @@ -116,12 +121,12 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, self._handle_ha_stop) ) - @property + @cached_property def model(self) -> str: """Model of the device.""" return cast(str, self.entry.data["model"]) - @property + @cached_property def mac(self) -> str: """Mac address of the device.""" return cast(str, self.entry.unique_id) @@ -169,7 +174,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( await self.device.initialize() update_device_fw_info(self.hass, self.device, self.entry) except DeviceConnectionError as err: - LOGGER.error( + LOGGER.debug( "Error connecting to Shelly device %s, error: %r", self.name, err ) return False @@ -184,7 +189,7 @@ class ShellyCoordinatorBase[_DeviceT: BlockDevice | RpcDevice]( if not self._pending_platforms: return True - LOGGER.debug("Device %s is online, resuming setup", self.entry.title) + LOGGER.debug("Device %s is online, resuming setup", self.name) platforms = self._pending_platforms self._pending_platforms = None @@ -372,6 +377,7 @@ class ShellyBlockCoordinator(ShellyCoordinatorBase[BlockDevice]): """Handle device update.""" LOGGER.debug("Shelly %s handle update, type: %s", self.name, update_type) if update_type is BlockUpdateType.ONLINE: + self._came_online_once = True self.entry.async_create_background_task( self.hass, self._async_device_connect_task(), @@ -472,9 +478,26 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): self._event_listeners: list[Callable[[dict[str, Any]], None]] = [] self._ota_event_listeners: list[Callable[[dict[str, Any]], None]] = [] self._input_event_listeners: list[Callable[[dict[str, Any]], None]] = [] - + self._connect_task: asyncio.Task | None = None entry.async_on_unload(entry.add_update_listener(self._async_update_listener)) + async def async_device_online(self, source: str) -> None: + """Handle device going online.""" + if not self.sleep_period: + await self.async_request_refresh() + elif not self._came_online_once or not self.device.initialized: + LOGGER.debug( + "Sleepy device %s is online (source: %s), trying to poll and configure", + self.name, + source, + ) + # Source told us the device is online, try to poll + # the device and if possible, set up the outbound + # websocket so the device will send us updates + # instead of relying on polling it fast enough before + # it goes to sleep again + self._async_handle_rpc_device_online() + def update_sleep_period(self) -> bool: """Check device sleep period & update if changed.""" if ( @@ -598,15 +621,15 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): async def _async_disconnected(self, reconnect: bool) -> None: """Handle device disconnected.""" - # Sleeping devices send data and disconnect - # There are no disconnect events for sleeping devices - if self.sleep_period: - return - async with self._connection_lock: if not self.connected: # Already disconnected return self.connected = False + # Sleeping devices send data and disconnect + # There are no disconnect events for sleeping devices + # but we do need to make sure self.connected is False + if self.sleep_period: + return self._async_run_disconnected_events() # Try to reconnect right away if triggered by disconnect event if reconnect: @@ -645,6 +668,21 @@ class 
ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): """ if not self.sleep_period: await self._async_connect_ble_scanner() + else: + await self._async_setup_outbound_websocket() + + async def _async_setup_outbound_websocket(self) -> None: + """Set up outbound websocket if it is not enabled.""" + config = self.device.config + if ( + (ws_config := config.get("ws")) + and (not ws_config["server"] or not ws_config["enable"]) + and (ws_url := get_rpc_ws_url(self.hass)) + ): + LOGGER.debug( + "Setting up outbound websocket for device %s - %s", self.name, ws_url + ) + await self.device.update_outbound_websocket(ws_url) async def _async_connect_ble_scanner(self) -> None: """Connect BLE scanner.""" @@ -662,6 +700,21 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): await async_connect_scanner(self.hass, self, ble_scanner_mode) ) + @callback + def _async_handle_rpc_device_online(self) -> None: + """Handle device going online.""" + if self.device.connected or ( + self._connect_task and not self._connect_task.done() + ): + LOGGER.debug("Device %s already connected/connecting", self.name) + return + self._connect_task = self.entry.async_create_background_task( + self.hass, + self._async_device_connect_task(), + "rpc device online", + eager_start=True, + ) + @callback def _async_handle_update( self, device_: RpcDevice, update_type: RpcUpdateType @@ -669,15 +722,8 @@ class ShellyRpcCoordinator(ShellyCoordinatorBase[RpcDevice]): """Handle device update.""" LOGGER.debug("Shelly %s handle update, type: %s", self.name, update_type) if update_type is RpcUpdateType.ONLINE: - if self.device.connected: - LOGGER.debug("Device %s already connected", self.name) - return - self.entry.async_create_background_task( - self.hass, - self._async_device_connect_task(), - "rpc device online", - eager_start=True, - ) + self._came_online_once = True + self._async_handle_rpc_device_online() elif update_type is RpcUpdateType.INITIALIZED: self.entry.async_create_background_task( self.hass, self._async_connected(), "rpc device init", eager_start=True @@ -747,8 +793,7 @@ class ShellyRpcPollingCoordinator(ShellyCoordinatorBase[RpcDevice]): LOGGER.debug("Polling Shelly RPC Device - %s", self.name) try: - await self.device.update_status() - await self.device.get_dynamic_components() + await self.device.poll() except (DeviceConnectionError, RpcCallError) as err: raise UpdateFailed(f"Device disconnected: {err!r}") from err except InvalidAuthError: @@ -798,14 +843,13 @@ def get_rpc_coordinator_by_device_id( async def async_reconnect_soon(hass: HomeAssistant, entry: ShellyConfigEntry) -> None: """Try to reconnect soon.""" if ( - not entry.data.get(CONF_SLEEP_PERIOD) - and not hass.is_stopping - and entry.state == ConfigEntryState.LOADED + not hass.is_stopping + and entry.state is ConfigEntryState.LOADED and (coordinator := entry.runtime_data.rpc) ): entry.async_create_background_task( hass, - coordinator.async_request_refresh(), + coordinator.async_device_online("zeroconf"), "reconnect soon", eager_start=True, ) diff --git a/homeassistant/components/shelly/diagnostics.py b/homeassistant/components/shelly/diagnostics.py index e70b76a7c00..a5fe1f5b6c0 100644 --- a/homeassistant/components/shelly/diagnostics.py +++ b/homeassistant/components/shelly/diagnostics.py @@ -11,6 +11,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import format_mac from .coordinator import ShellyConfigEntry +from .utils import get_rpc_ws_url TO_REDACT = {CONF_USERNAME, CONF_PASSWORD} @@ -73,6 +74,12 @@ 
async def async_get_config_entry_diagnostics( device_settings = { k: v for k, v in rpc_coordinator.device.config.items() if k in ["cloud"] } + ws_config = rpc_coordinator.device.config["ws"] + device_settings["ws_outbound_enabled"] = ws_config["enable"] + if ws_config["enable"]: + device_settings["ws_outbound_server_valid"] = bool( + ws_config["server"] == get_rpc_ws_url(hass) + ) device_status = { k: v for k, v in rpc_coordinator.device.status.items() diff --git a/homeassistant/components/shelly/entity.py b/homeassistant/components/shelly/entity.py index 980a39feaba..aea060e09e2 100644 --- a/homeassistant/components/shelly/entity.py +++ b/homeassistant/components/shelly/entity.py @@ -488,7 +488,7 @@ class ShellyRestAttributeEntity(CoordinatorEntity[ShellyBlockCoordinator]): @property def attribute_value(self) -> StateType: """Value of sensor.""" - if callable(self.entity_description.value): + if self.entity_description.value is not None: self._last_value = self.entity_description.value( self.block_coordinator.device.status, self._last_value ) @@ -518,7 +518,7 @@ class ShellyRpcAttributeEntity(ShellyRpcEntity, Entity): id_key = key.split(":")[-1] self._id = int(id_key) if id_key.isnumeric() else None - if callable(description.unit): + if description.unit is not None: self._attr_native_unit_of_measurement = description.unit( coordinator.device.config[key] ) @@ -544,7 +544,7 @@ class ShellyRpcAttributeEntity(ShellyRpcEntity, Entity): @property def attribute_value(self) -> StateType: """Value of sensor.""" - if callable(self.entity_description.value): + if self.entity_description.value is not None: # using "get" here since subkey might not exist (e.g. "errors" sub_key) self._last_value = self.entity_description.value( self.status.get(self.entity_description.sub_key), self._last_value diff --git a/homeassistant/components/shelly/manifest.json b/homeassistant/components/shelly/manifest.json index da3bbc4bb6e..5e2522ea456 100644 --- a/homeassistant/components/shelly/manifest.json +++ b/homeassistant/components/shelly/manifest.json @@ -9,7 +9,7 @@ "iot_class": "local_push", "loggers": ["aioshelly"], "quality_scale": "platinum", - "requirements": ["aioshelly==11.2.4"], + "requirements": ["aioshelly==11.4.2"], "zeroconf": [ { "type": "_http._tcp.local.", diff --git a/homeassistant/components/shelly/number.py b/homeassistant/components/shelly/number.py index 67c33faf150..1e0f5b020ac 100644 --- a/homeassistant/components/shelly/number.py +++ b/homeassistant/components/shelly/number.py @@ -207,17 +207,17 @@ class RpcNumber(ShellyRpcAttributeEntity, NumberEntity): """Initialize sensor.""" super().__init__(coordinator, key, attribute, description) - if callable(description.max_fn): + if description.max_fn is not None: self._attr_native_max_value = description.max_fn( coordinator.device.config[key] ) - if callable(description.min_fn): + if description.min_fn is not None: self._attr_native_min_value = description.min_fn( coordinator.device.config[key] ) - if callable(description.step_fn): + if description.step_fn is not None: self._attr_native_step = description.step_fn(coordinator.device.config[key]) - if callable(description.mode_fn): + if description.mode_fn is not None: self._attr_mode = description.mode_fn(coordinator.device.config[key]) @property diff --git a/homeassistant/components/shelly/sensor.py b/homeassistant/components/shelly/sensor.py index 8c1333a989c..1ef174119e4 100644 --- a/homeassistant/components/shelly/sensor.py +++ b/homeassistant/components/shelly/sensor.py @@ -392,6 +392,22 @@ 
RPC_SENSORS: Final = { device_class=SensorDeviceClass.POWER, state_class=SensorStateClass.MEASUREMENT, ), + "power_rgb": RpcSensorDescription( + key="rgb", + sub_key="apower", + name="Power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), + "power_rgbw": RpcSensorDescription( + key="rgbw", + sub_key="apower", + name="Power", + native_unit_of_measurement=UnitOfPower.WATT, + device_class=SensorDeviceClass.POWER, + state_class=SensorStateClass.MEASUREMENT, + ), "a_act_power": RpcSensorDescription( key="em", sub_key="a_act_power", @@ -536,6 +552,28 @@ RPC_SENSORS: Final = { state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), + "voltage_rgb": RpcSensorDescription( + key="rgb", + sub_key="voltage", + name="Voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value=lambda status, _: None if status is None else float(status), + suggested_display_precision=1, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + "voltage_rgbw": RpcSensorDescription( + key="rgbw", + sub_key="voltage", + name="Voltage", + native_unit_of_measurement=UnitOfElectricPotential.VOLT, + value=lambda status, _: None if status is None else float(status), + suggested_display_precision=1, + device_class=SensorDeviceClass.VOLTAGE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), "a_voltage": RpcSensorDescription( key="em", sub_key="a_voltage", @@ -603,6 +641,26 @@ RPC_SENSORS: Final = { state_class=SensorStateClass.MEASUREMENT, entity_registry_enabled_default=False, ), + "current_rgb": RpcSensorDescription( + key="rgb", + sub_key="current", + name="Current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value=lambda status, _: None if status is None else float(status), + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), + "current_rgbw": RpcSensorDescription( + key="rgbw", + sub_key="current", + name="Current", + native_unit_of_measurement=UnitOfElectricCurrent.AMPERE, + value=lambda status, _: None if status is None else float(status), + device_class=SensorDeviceClass.CURRENT, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + ), "a_current": RpcSensorDescription( key="em", sub_key="a_current", @@ -683,6 +741,28 @@ RPC_SENSORS: Final = { device_class=SensorDeviceClass.ENERGY, state_class=SensorStateClass.TOTAL_INCREASING, ), + "energy_rgb": RpcSensorDescription( + key="rgb", + sub_key="aenergy", + name="Energy", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value=lambda status, _: status["total"], + suggested_display_precision=2, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), + "energy_rgbw": RpcSensorDescription( + key="rgbw", + sub_key="aenergy", + name="Energy", + native_unit_of_measurement=UnitOfEnergy.WATT_HOUR, + suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + value=lambda status, _: status["total"], + suggested_display_precision=2, + device_class=SensorDeviceClass.ENERGY, + state_class=SensorStateClass.TOTAL_INCREASING, + ), "total_act": RpcSensorDescription( key="emdata", sub_key="total_act", @@ -895,6 +975,32 @@ RPC_SENSORS: Final = { entity_category=EntityCategory.DIAGNOSTIC, use_polling_coordinator=True, 
), + "temperature_rgb": RpcSensorDescription( + key="rgb", + sub_key="temperature", + name="Device temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value=lambda status, _: status["tC"], + suggested_display_precision=1, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + entity_category=EntityCategory.DIAGNOSTIC, + use_polling_coordinator=True, + ), + "temperature_rgbw": RpcSensorDescription( + key="rgbw", + sub_key="temperature", + name="Device temperature", + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + value=lambda status, _: status["tC"], + suggested_display_precision=1, + device_class=SensorDeviceClass.TEMPERATURE, + state_class=SensorStateClass.MEASUREMENT, + entity_registry_enabled_default=False, + entity_category=EntityCategory.DIAGNOSTIC, + use_polling_coordinator=True, + ), "temperature_0": RpcSensorDescription( key="temperature", sub_key="tC", @@ -976,6 +1082,7 @@ RPC_SENSORS: Final = { or config[key]["enable"] is False or status[key].get("xpercent") is None ), + unit=lambda config: config["xpercent"]["unit"] or None, ), "pulse_counter": RpcSensorDescription( key="input", @@ -998,6 +1105,7 @@ RPC_SENSORS: Final = { or config[key]["enable"] is False or status[key]["counts"].get("xtotal") is None ), + unit=lambda config: config["xcounts"]["unit"] or None, ), "counter_frequency": RpcSensorDescription( key="input", @@ -1018,6 +1126,7 @@ RPC_SENSORS: Final = { or config[key]["enable"] is False or status[key].get("xfreq") is None ), + unit=lambda config: config["xfreq"]["unit"] or None, ), "text": RpcSensorDescription( key="text", @@ -1157,13 +1266,15 @@ class RpcSensor(ShellyRpcAttributeEntity, SensorEntity): @property def native_value(self) -> StateType: """Return value of sensor.""" - if not self.option_map: - return self.attribute_value + attribute_value = self.attribute_value - if not isinstance(self.attribute_value, str): + if not self.option_map: + return attribute_value + + if not isinstance(attribute_value, str): return None - return self.option_map[self.attribute_value] + return self.option_map[attribute_value] class BlockSleepingSensor(ShellySleepingBlockAttributeEntity, RestoreSensor): diff --git a/homeassistant/components/shelly/utils.py b/homeassistant/components/shelly/utils.py index 339f6781171..d0a8a1230c5 100644 --- a/homeassistant/components/shelly/utils.py +++ b/homeassistant/components/shelly/utils.py @@ -23,6 +23,7 @@ from aioshelly.const import ( RPC_GENERATIONS, ) from aioshelly.rpc_device import RpcDevice, WsServer +from yarl import URL from homeassistant.components import network from homeassistant.components.http import HomeAssistantView @@ -36,9 +37,11 @@ from homeassistant.helpers import ( singleton, ) from homeassistant.helpers.device_registry import CONNECTION_NETWORK_MAC +from homeassistant.helpers.network import NoURLAvailableError, get_url from homeassistant.util.dt import utcnow from .const import ( + API_WS_URL, BASIC_INPUTS_EVENTS_TYPES, CONF_COAP_PORT, CONF_GEN, @@ -254,7 +257,7 @@ class ShellyReceiver(HomeAssistantView): """Handle pushes from Shelly Gen2 devices.""" requires_auth = False - url = "/api/shelly/ws" + url = API_WS_URL name = "api:shelly:ws" def __init__(self, ws_server: WsServer) -> None: @@ -571,3 +574,15 @@ def async_remove_orphaned_virtual_entities( if orphaned_entities: async_remove_shelly_rpc_entities(hass, platform, mac, orphaned_entities) + + +def get_rpc_ws_url(hass: HomeAssistant) -> str | None: + """Return the 
RPC websocket URL.""" + try: + raw_url = get_url(hass, prefer_external=False, allow_cloud=False) + except NoURLAvailableError: + LOGGER.debug("URL not available, skipping outbound websocket setup") + return None + url = URL(raw_url) + ws_url = url.with_scheme("wss" if url.scheme == "https" else "ws") + return str(ws_url.joinpath(API_WS_URL.removeprefix("/"))) diff --git a/homeassistant/components/shopping_list/icons.json b/homeassistant/components/shopping_list/icons.json index 7de3eb1b948..9b3d8a08a79 100644 --- a/homeassistant/components/shopping_list/icons.json +++ b/homeassistant/components/shopping_list/icons.json @@ -7,13 +7,29 @@ } }, "services": { - "add_item": "mdi:cart-plus", - "remove_item": "mdi:cart-remove", - "complete_item": "mdi:cart-check", - "incomplete_item": "mdi:cart-off", - "complete_all": "mdi:cart-check", - "incomplete_all": "mdi:cart-off", - "clear_completed_items": "mdi:cart-remove", - "sort": "mdi:sort" + "add_item": { + "service": "mdi:cart-plus" + }, + "remove_item": { + "service": "mdi:cart-remove" + }, + "complete_item": { + "service": "mdi:cart-check" + }, + "incomplete_item": { + "service": "mdi:cart-off" + }, + "complete_all": { + "service": "mdi:cart-check" + }, + "incomplete_all": { + "service": "mdi:cart-off" + }, + "clear_completed_items": { + "service": "mdi:cart-remove" + }, + "sort": { + "service": "mdi:sort" + } } } diff --git a/homeassistant/components/signal_messenger/notify.py b/homeassistant/components/signal_messenger/notify.py index 21d42f8912f..9321bc3232f 100644 --- a/homeassistant/components/signal_messenger/notify.py +++ b/homeassistant/components/signal_messenger/notify.py @@ -166,7 +166,7 @@ class SignalNotificationService(BaseNotificationService): and int(str(resp.headers.get("Content-Length"))) > attachment_size_limit ): - raise ValueError( + raise ValueError( # noqa: TRY301 "Attachment too large (Content-Length reports {}). Max size: {}" " bytes".format( int(str(resp.headers.get("Content-Length"))), @@ -179,7 +179,7 @@ class SignalNotificationService(BaseNotificationService): for chunk in resp.iter_content(1024): size += len(chunk) if size > attachment_size_limit: - raise ValueError( + raise ValueError( # noqa: TRY301 f"Attachment too large (Stream reports {size}). 
" f"Max size: {CONF_MAX_ALLOWED_DOWNLOAD_SIZE_BYTES} bytes" ) diff --git a/homeassistant/components/simplefin/__init__.py b/homeassistant/components/simplefin/__init__.py index 0aa33dec9ac..c47b3118415 100644 --- a/homeassistant/components/simplefin/__init__.py +++ b/homeassistant/components/simplefin/__init__.py @@ -11,7 +11,10 @@ from homeassistant.core import HomeAssistant from .const import CONF_ACCESS_URL from .coordinator import SimpleFinDataUpdateCoordinator -PLATFORMS: list[str] = [Platform.SENSOR] +PLATFORMS: list[str] = [ + Platform.BINARY_SENSOR, + Platform.SENSOR, +] type SimpleFinConfigEntry = ConfigEntry[SimpleFinDataUpdateCoordinator] diff --git a/homeassistant/components/simplefin/binary_sensor.py b/homeassistant/components/simplefin/binary_sensor.py new file mode 100644 index 00000000000..5805fc370b6 --- /dev/null +++ b/homeassistant/components/simplefin/binary_sensor.py @@ -0,0 +1,68 @@ +"""Binary Sensor for SimpleFin.""" + +from collections.abc import Callable +from dataclasses import dataclass + +from simplefin4py import Account + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import SimpleFinConfigEntry +from .entity import SimpleFinEntity + + +@dataclass(frozen=True, kw_only=True) +class SimpleFinBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes a sensor entity.""" + + value_fn: Callable[[Account], bool] + + +SIMPLEFIN_BINARY_SENSORS: tuple[SimpleFinBinarySensorEntityDescription, ...] = ( + SimpleFinBinarySensorEntityDescription( + key="possible_error", + translation_key="possible_error", + device_class=BinarySensorDeviceClass.PROBLEM, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=lambda account: account.possible_error, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: SimpleFinConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up SimpleFIN sensors for config entries.""" + + sf_coordinator = config_entry.runtime_data + accounts = sf_coordinator.data.accounts + + async_add_entities( + SimpleFinBinarySensor( + sf_coordinator, + sensor_description, + account, + ) + for account in accounts + for sensor_description in SIMPLEFIN_BINARY_SENSORS + ) + + +class SimpleFinBinarySensor(SimpleFinEntity, BinarySensorEntity): + """Extends IntellifireEntity with Binary Sensor specific logic.""" + + entity_description: SimpleFinBinarySensorEntityDescription + + @property + def is_on(self) -> bool: + """Use this to get the correct value.""" + return self.entity_description.value_fn(self.account_data) diff --git a/homeassistant/components/simplefin/strings.json b/homeassistant/components/simplefin/strings.json index d6690e604c5..3ac03fe2cc0 100644 --- a/homeassistant/components/simplefin/strings.json +++ b/homeassistant/components/simplefin/strings.json @@ -21,6 +21,9 @@ } }, "entity": { + "binary_sensor": { + "possible_error": { "name": "Possible error" } + }, "sensor": { "balance": { "name": "Balance" diff --git a/homeassistant/components/simplisafe/__init__.py b/homeassistant/components/simplisafe/__init__.py index 29f53eafffb..b23358c985f 100644 --- a/homeassistant/components/simplisafe/__init__.py +++ b/homeassistant/components/simplisafe/__init__.py @@ -212,8 +212,6 @@ WEBSOCKET_EVENTS_TO_FIRE_HASS_EVENT = [ 
EVENT_USER_INITIATED_TEST, ] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - @callback def _async_get_system_for_service_call( diff --git a/homeassistant/components/simplisafe/config_flow.py b/homeassistant/components/simplisafe/config_flow.py index c0d98c5644f..6fdbd351a29 100644 --- a/homeassistant/components/simplisafe/config_flow.py +++ b/homeassistant/components/simplisafe/config_flow.py @@ -69,7 +69,9 @@ class SimpliSafeFlowHandler(ConfigFlow, domain=DOMAIN): """Define the config flow to handle options.""" return SimpliSafeOptionsFlowHandler(config_entry) - async def async_step_reauth(self, config: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle configuration by re-auth.""" self._reauth = True return await self.async_step_user() diff --git a/homeassistant/components/simplisafe/icons.json b/homeassistant/components/simplisafe/icons.json index 60ddb7f0982..8552993210f 100644 --- a/homeassistant/components/simplisafe/icons.json +++ b/homeassistant/components/simplisafe/icons.json @@ -1,7 +1,13 @@ { "services": { - "remove_pin": "mdi:alarm-panel-outline", - "set_pin": "mdi:alarm-panel", - "set_system_properties": "mdi:cog" + "remove_pin": { + "service": "mdi:alarm-panel-outline" + }, + "set_pin": { + "service": "mdi:alarm-panel" + }, + "set_system_properties": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/siren/__init__.py b/homeassistant/components/siren/__init__.py index 216e111b7db..801ca4f2bee 100644 --- a/homeassistant/components/siren/__init__.py +++ b/homeassistant/components/siren/__init__.py @@ -129,11 +129,11 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: [SirenEntityFeature.TURN_ON], ) component.async_register_entity_service( - SERVICE_TURN_OFF, {}, "async_turn_off", [SirenEntityFeature.TURN_OFF] + SERVICE_TURN_OFF, None, "async_turn_off", [SirenEntityFeature.TURN_OFF] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [SirenEntityFeature.TURN_ON | SirenEntityFeature.TURN_OFF], ) diff --git a/homeassistant/components/siren/icons.json b/homeassistant/components/siren/icons.json index 0083a2540c7..75caf6417da 100644 --- a/homeassistant/components/siren/icons.json +++ b/homeassistant/components/siren/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "toggle": "mdi:bullhorn", - "turn_off": "mdi:bullhorn", - "turn_on": "mdi:bullhorn" + "toggle": { + "service": "mdi:bullhorn" + }, + "turn_off": { + "service": "mdi:bullhorn" + }, + "turn_on": { + "service": "mdi:bullhorn" + } } } diff --git a/homeassistant/components/sisyphus/manifest.json b/homeassistant/components/sisyphus/manifest.json index dbb40344d66..4e344c0b25e 100644 --- a/homeassistant/components/sisyphus/manifest.json +++ b/homeassistant/components/sisyphus/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/sisyphus", "iot_class": "local_push", "loggers": ["sisyphus_control"], - "requirements": ["sisyphus-control==3.1.3"] + "requirements": ["sisyphus-control==3.1.4"] } diff --git a/homeassistant/components/slack/notify.py b/homeassistant/components/slack/notify.py index a18b211962a..28f9dd203ff 100644 --- a/homeassistant/components/slack/notify.py +++ b/homeassistant/components/slack/notify.py @@ -291,7 +291,6 @@ class SlackNotificationService(BaseNotificationService): if ATTR_FILE not in data: if ATTR_BLOCKS_TEMPLATE in data: value = cv.template_complex(data[ATTR_BLOCKS_TEMPLATE]) - 
template.attach(self._hass, value) blocks = template.render_complex(value) elif ATTR_BLOCKS in data: blocks = data[ATTR_BLOCKS] diff --git a/homeassistant/components/sleepiq/config_flow.py b/homeassistant/components/sleepiq/config_flow.py index 4a4813192c3..26f3672d588 100644 --- a/homeassistant/components/sleepiq/config_flow.py +++ b/homeassistant/components/sleepiq/config_flow.py @@ -28,22 +28,20 @@ class SleepIQFlowHandler(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self._reauth_entry: ConfigEntry | None = None - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a SleepIQ account as a config entry. This flow is triggered by 'async_setup' for configured accounts. """ - await self.async_set_unique_id(import_config[CONF_USERNAME].lower()) + await self.async_set_unique_id(import_data[CONF_USERNAME].lower()) self._abort_if_unique_id_configured() - if error := await try_connection(self.hass, import_config): + if error := await try_connection(self.hass, import_data): _LOGGER.error("Could not authenticate with SleepIQ server: %s", error) return self.async_abort(reason=error) return self.async_create_entry( - title=import_config[CONF_USERNAME], data=import_config + title=import_data[CONF_USERNAME], data=import_data ) async def async_step_user( diff --git a/homeassistant/components/sma/sensor.py b/homeassistant/components/sma/sensor.py index 9d580a76d9e..302c4f6b197 100644 --- a/homeassistant/components/sma/sensor.py +++ b/homeassistant/components/sma/sensor.py @@ -15,7 +15,6 @@ from homeassistant.components.sensor import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, EntityCategory, UnitOfApparentPower, UnitOfElectricCurrent, @@ -23,6 +22,7 @@ from homeassistant.const import ( UnitOfEnergy, UnitOfFrequency, UnitOfPower, + UnitOfReactivePower, UnitOfTemperature, ) from homeassistant.core import HomeAssistant @@ -204,7 +204,7 @@ SENSOR_ENTITIES: dict[str, SensorEntityDescription] = { "grid_reactive_power": SensorEntityDescription( key="grid_reactive_power", name="Grid Reactive Power", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.REACTIVE_POWER, entity_registry_enabled_default=False, @@ -212,7 +212,7 @@ SENSOR_ENTITIES: dict[str, SensorEntityDescription] = { "grid_reactive_power_l1": SensorEntityDescription( key="grid_reactive_power_l1", name="Grid Reactive Power L1", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.REACTIVE_POWER, entity_registry_enabled_default=False, @@ -220,7 +220,7 @@ SENSOR_ENTITIES: dict[str, SensorEntityDescription] = { "grid_reactive_power_l2": SensorEntityDescription( key="grid_reactive_power_l2", name="Grid Reactive Power L2", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.REACTIVE_POWER, entity_registry_enabled_default=False, @@ -228,7 +228,7 @@ SENSOR_ENTITIES: dict[str, SensorEntityDescription] = { "grid_reactive_power_l3": SensorEntityDescription( key="grid_reactive_power_l3", 
name="Grid Reactive Power L3", - native_unit_of_measurement=POWER_VOLT_AMPERE_REACTIVE, + native_unit_of_measurement=UnitOfReactivePower.VOLT_AMPERE_REACTIVE, state_class=SensorStateClass.MEASUREMENT, device_class=SensorDeviceClass.REACTIVE_POWER, entity_registry_enabled_default=False, diff --git a/homeassistant/components/smappee/config_flow.py b/homeassistant/components/smappee/config_flow.py index 6ed18905233..d5073bd9c34 100644 --- a/homeassistant/components/smappee/config_flow.py +++ b/homeassistant/components/smappee/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Smappee.""" import logging +from typing import Any from pysmappee import helper, mqtt import voluptuous as vol @@ -106,7 +107,9 @@ class SmappeeFlowHandler( data={CONF_IP_ADDRESS: ip_address, CONF_SERIALNUMBER: serial_number}, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" # If there is a CLOUD entry already, abort a new LOCAL entry diff --git a/homeassistant/components/smart_meter_texas/config_flow.py b/homeassistant/components/smart_meter_texas/config_flow.py index bbe1361b795..b60855b62c8 100644 --- a/homeassistant/components/smart_meter_texas/config_flow.py +++ b/homeassistant/components/smart_meter_texas/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Smart Meter Texas integration.""" import logging +from typing import Any from aiohttp import ClientError from smart_meter_texas import Account, Client, ClientSSLContext @@ -10,7 +11,7 @@ from smart_meter_texas.exceptions import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -52,7 +53,9 @@ class SMTConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/smartthings/config_flow.py b/homeassistant/components/smartthings/config_flow.py index 2ecc3375026..df5b7a8acfa 100644 --- a/homeassistant/components/smartthings/config_flow.py +++ b/homeassistant/components/smartthings/config_flow.py @@ -2,13 +2,14 @@ from http import HTTPStatus import logging +from typing import Any from aiohttp import ClientResponseError from pysmartthings import APIResponseError, AppOAuth, SmartThings from pysmartthings.installedapp import format_install_url import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -53,11 +54,13 @@ class SmartThingsFlowHandler(ConfigFlow, domain=DOMAIN): self.location_id = None self.endpoints_initialized = False - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: None) -> ConfigFlowResult: """Occurs when a previously entry setup fails and is re-initiated.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) - async def async_step_user(self, user_input=None): + async def 
async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Validate and confirm webhook setup.""" if not self.endpoints_initialized: self.endpoints_initialized = True diff --git a/homeassistant/components/smarttub/config_flow.py b/homeassistant/components/smarttub/config_flow.py index 60f14b03e45..827375c907c 100644 --- a/homeassistant/components/smarttub/config_flow.py +++ b/homeassistant/components/smarttub/config_flow.py @@ -3,7 +3,7 @@ from __future__ import annotations from collections.abc import Mapping -from typing import Any +from typing import TYPE_CHECKING, Any from smarttub import LoginFailed import voluptuous as vol @@ -30,7 +30,9 @@ class SmartTubConfigFlow(ConfigFlow, domain=DOMAIN): self._reauth_input: Mapping[str, Any] | None = None self._reauth_entry: ConfigEntry | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} @@ -53,6 +55,8 @@ class SmartTubConfigFlow(ConfigFlow, domain=DOMAIN): ) # this is a reauth attempt + if TYPE_CHECKING: + assert self._reauth_entry if self._reauth_entry.unique_id != self.unique_id: # there is a config entry matching this account, # but it is not the one we were trying to reauth diff --git a/homeassistant/components/smarttub/icons.json b/homeassistant/components/smarttub/icons.json index 7ae96d03383..2b89445754c 100644 --- a/homeassistant/components/smarttub/icons.json +++ b/homeassistant/components/smarttub/icons.json @@ -1,8 +1,16 @@ { "services": { - "set_primary_filtration": "mdi:filter", - "set_secondary_filtration": "mdi:filter-multiple", - "snooze_reminder": "mdi:timer-pause", - "reset_reminder": "mdi:timer-sync" + "set_primary_filtration": { + "service": "mdi:filter" + }, + "set_secondary_filtration": { + "service": "mdi:filter-multiple" + }, + "snooze_reminder": { + "service": "mdi:timer-pause" + }, + "reset_reminder": { + "service": "mdi:timer-sync" + } } } diff --git a/homeassistant/components/smlight/__init__.py b/homeassistant/components/smlight/__init__.py new file mode 100644 index 00000000000..16eb60b9c87 --- /dev/null +++ b/homeassistant/components/smlight/__init__.py @@ -0,0 +1,30 @@ +"""SMLIGHT SLZB Zigbee device integration.""" + +from __future__ import annotations + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_HOST, Platform +from homeassistant.core import HomeAssistant + +from .coordinator import SmDataUpdateCoordinator + +PLATFORMS: list[Platform] = [ + Platform.SENSOR, +] +type SmConfigEntry = ConfigEntry[SmDataUpdateCoordinator] + + +async def async_setup_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: + """Set up SMLIGHT Zigbee from a config entry.""" + coordinator = SmDataUpdateCoordinator(hass, entry.data[CONF_HOST]) + await coordinator.async_config_entry_first_refresh() + entry.runtime_data = coordinator + + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: SmConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/smlight/config_flow.py b/homeassistant/components/smlight/config_flow.py new file mode 100644 index 00000000000..1b8cc4efeb1 --- /dev/null +++ b/homeassistant/components/smlight/config_flow.py @@ -0,0 +1,151 @@ +"""Config flow for SMLIGHT 
Zigbee integration.""" + +from __future__ import annotations + +from typing import Any + +from pysmlight import Api2 +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError +import voluptuous as vol + +from homeassistant.components import zeroconf +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PASSWORD, CONF_USERNAME +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import format_mac + +from .const import DOMAIN + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_HOST): str, + } +) + +STEP_AUTH_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class SmlightConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for SMLIGHT Zigbee.""" + + def __init__(self) -> None: + """Initialize the config flow.""" + self.client: Api2 + self.host: str | None = None + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step.""" + errors: dict[str, str] = {} + + if user_input is not None: + host = user_input[CONF_HOST] + self.client = Api2(host, session=async_get_clientsession(self.hass)) + self.host = host + + try: + if not await self._async_check_auth_required(user_input): + return await self._async_complete_entry(user_input) + except SmlightConnectionError: + errors["base"] = "cannot_connect" + except SmlightAuthError: + return await self.async_step_auth() + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) + + async def async_step_auth( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle authentication to SLZB-06 device.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + if not await self._async_check_auth_required(user_input): + return await self._async_complete_entry(user_input) + except SmlightConnectionError: + return self.async_abort(reason="cannot_connect") + except SmlightAuthError: + errors["base"] = "invalid_auth" + + return self.async_show_form( + step_id="auth", data_schema=STEP_AUTH_DATA_SCHEMA, errors=errors + ) + + async def async_step_zeroconf( + self, discovery_info: zeroconf.ZeroconfServiceInfo + ) -> ConfigFlowResult: + """Handle a discovered Lan coordinator.""" + local_name = discovery_info.hostname[:-1] + node_name = local_name.removesuffix(".local") + + self.host = local_name + self.context["title_placeholders"] = {CONF_NAME: node_name} + self.client = Api2(self.host, session=async_get_clientsession(self.hass)) + + mac = discovery_info.properties.get("mac") + # fallback for legacy firmware + if mac is None: + info = await self.client.get_info() + mac = info.MAC + await self.async_set_unique_id(format_mac(mac)) + self._abort_if_unique_id_configured() + + return await self.async_step_confirm_discovery() + + async def async_step_confirm_discovery( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle discovery confirm.""" + errors: dict[str, str] = {} + + if user_input is not None: + user_input[CONF_HOST] = self.host + try: + if not await self._async_check_auth_required(user_input): + return await self._async_complete_entry(user_input) + + except SmlightConnectionError: + return self.async_abort(reason="cannot_connect") + + except SmlightAuthError: + return await self.async_step_auth() + + self._set_confirm_only() + + 
return self.async_show_form( + step_id="confirm_discovery", + description_placeholders={"host": self.host}, + errors=errors, + ) + + async def _async_check_auth_required(self, user_input: dict[str, Any]) -> bool: + """Check if auth required and attempt to authenticate.""" + if await self.client.check_auth_needed(): + if user_input.get(CONF_USERNAME) and user_input.get(CONF_PASSWORD): + return not await self.client.authenticate( + user_input[CONF_USERNAME], user_input[CONF_PASSWORD] + ) + raise SmlightAuthError + return False + + async def _async_complete_entry( + self, user_input: dict[str, Any] + ) -> ConfigFlowResult: + info = await self.client.get_info() + await self.async_set_unique_id(format_mac(info.MAC)) + self._abort_if_unique_id_configured() + + if user_input.get(CONF_HOST) is None: + user_input[CONF_HOST] = self.host + + assert info.model is not None + return self.async_create_entry(title=info.model, data=user_input) diff --git a/homeassistant/components/smlight/const.py b/homeassistant/components/smlight/const.py new file mode 100644 index 00000000000..de3270fe3be --- /dev/null +++ b/homeassistant/components/smlight/const.py @@ -0,0 +1,11 @@ +"""Constants for the SMLIGHT Zigbee integration.""" + +from datetime import timedelta +import logging + +DOMAIN = "smlight" + +ATTR_MANUFACTURER = "SMLIGHT" + +LOGGER = logging.getLogger(__package__) +SCAN_INTERVAL = timedelta(seconds=300) diff --git a/homeassistant/components/smlight/coordinator.py b/homeassistant/components/smlight/coordinator.py new file mode 100644 index 00000000000..6a29f14fafd --- /dev/null +++ b/homeassistant/components/smlight/coordinator.py @@ -0,0 +1,71 @@ +"""DataUpdateCoordinator for Smlight.""" + +from dataclasses import dataclass + +from pysmlight import Api2, Info, Sensors +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryError +from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.device_registry import format_mac +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +from .const import DOMAIN, LOGGER, SCAN_INTERVAL + + +@dataclass +class SmData: + """SMLIGHT data stored in the DataUpdateCoordinator.""" + + sensors: Sensors + info: Info + + +class SmDataUpdateCoordinator(DataUpdateCoordinator[SmData]): + """Class to manage fetching SMLIGHT data.""" + + config_entry: ConfigEntry + + def __init__(self, hass: HomeAssistant, host: str) -> None: + """Initialize the coordinator.""" + super().__init__( + hass, + LOGGER, + name=f"{DOMAIN}_{host}", + update_interval=SCAN_INTERVAL, + ) + + self.unique_id: str | None = None + self.client = Api2(host=host, session=async_get_clientsession(hass)) + + async def _async_setup(self) -> None: + """Authenticate if needed during initial setup.""" + if await self.client.check_auth_needed(): + if ( + CONF_USERNAME in self.config_entry.data + and CONF_PASSWORD in self.config_entry.data + ): + try: + await self.client.authenticate( + self.config_entry.data[CONF_USERNAME], + self.config_entry.data[CONF_PASSWORD], + ) + except SmlightAuthError as err: + LOGGER.error("Failed to authenticate: %s", err) + raise ConfigEntryError from err + + info = await self.client.get_info() + self.unique_id = format_mac(info.MAC) + + async def _async_update_data(self) -> SmData: + 
"""Fetch data from the SMLIGHT device.""" + try: + return SmData( + sensors=await self.client.get_sensors(), + info=await self.client.get_info(), + ) + except SmlightConnectionError as err: + raise UpdateFailed(err) from err diff --git a/homeassistant/components/smlight/entity.py b/homeassistant/components/smlight/entity.py new file mode 100644 index 00000000000..50767d3bf74 --- /dev/null +++ b/homeassistant/components/smlight/entity.py @@ -0,0 +1,31 @@ +"""Base class for all SMLIGHT entities.""" + +from __future__ import annotations + +from homeassistant.helpers.device_registry import ( + CONNECTION_NETWORK_MAC, + DeviceInfo, + format_mac, +) +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .const import ATTR_MANUFACTURER +from .coordinator import SmDataUpdateCoordinator + + +class SmEntity(CoordinatorEntity[SmDataUpdateCoordinator]): + """Base class for all SMLight entities.""" + + _attr_has_entity_name = True + + def __init__(self, coordinator: SmDataUpdateCoordinator) -> None: + """Initialize entity with device.""" + super().__init__(coordinator) + mac = format_mac(coordinator.data.info.MAC) + self._attr_device_info = DeviceInfo( + configuration_url=f"http://{coordinator.client.host}", + connections={(CONNECTION_NETWORK_MAC, mac)}, + manufacturer=ATTR_MANUFACTURER, + model=coordinator.data.info.model, + sw_version=f"core: {coordinator.data.info.sw_version} / zigbee: {coordinator.data.info.zb_version}", + ) diff --git a/homeassistant/components/smlight/manifest.json b/homeassistant/components/smlight/manifest.json new file mode 100644 index 00000000000..72d915666e5 --- /dev/null +++ b/homeassistant/components/smlight/manifest.json @@ -0,0 +1,15 @@ +{ + "domain": "smlight", + "name": "SMLIGHT SLZB", + "codeowners": ["@tl-sl"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/smlight", + "integration_type": "device", + "iot_class": "local_polling", + "requirements": ["pysmlight==0.0.13"], + "zeroconf": [ + { + "type": "_slzb-06._tcp.local." + } + ] +} diff --git a/homeassistant/components/smlight/sensor.py b/homeassistant/components/smlight/sensor.py new file mode 100644 index 00000000000..d9c03760fb8 --- /dev/null +++ b/homeassistant/components/smlight/sensor.py @@ -0,0 +1,103 @@ +"""Support for SLZB-06 sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass + +from pysmlight import Sensors + +from homeassistant.components.sensor import ( + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import EntityCategory, UnitOfInformation, UnitOfTemperature +from homeassistant.core import HomeAssistant +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import SmConfigEntry +from .coordinator import SmDataUpdateCoordinator +from .entity import SmEntity + + +@dataclass(frozen=True, kw_only=True) +class SmSensorEntityDescription(SensorEntityDescription): + """Class describing SMLIGHT sensor entities.""" + + entity_category = EntityCategory.DIAGNOSTIC + value_fn: Callable[[Sensors], float | None] + + +SENSORS = [ + SmSensorEntityDescription( + key="core_temperature", + translation_key="core_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda x: x.esp32_temp, + ), + SmSensorEntityDescription( + key="zigbee_temperature", + translation_key="zigbee_temperature", + device_class=SensorDeviceClass.TEMPERATURE, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, + suggested_display_precision=1, + value_fn=lambda x: x.zb_temp, + ), + SmSensorEntityDescription( + key="ram_usage", + translation_key="ram_usage", + device_class=SensorDeviceClass.DATA_SIZE, + native_unit_of_measurement=UnitOfInformation.KILOBYTES, + entity_registry_enabled_default=False, + value_fn=lambda x: x.ram_usage, + ), + SmSensorEntityDescription( + key="fs_usage", + translation_key="fs_usage", + device_class=SensorDeviceClass.DATA_SIZE, + native_unit_of_measurement=UnitOfInformation.KILOBYTES, + entity_registry_enabled_default=False, + value_fn=lambda x: x.fs_used, + ), +] + + +async def async_setup_entry( + hass: HomeAssistant, + entry: SmConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up SMLIGHT sensor based on a config entry.""" + coordinator = entry.runtime_data + + async_add_entities( + SmSensorEntity(coordinator, description) for description in SENSORS + ) + + +class SmSensorEntity(SmEntity, SensorEntity): + """Representation of a slzb sensor.""" + + entity_description: SmSensorEntityDescription + + def __init__( + self, + coordinator: SmDataUpdateCoordinator, + description: SmSensorEntityDescription, + ) -> None: + """Initiate slzb sensor.""" + super().__init__(coordinator) + + self.entity_description = description + self._attr_unique_id = f"{coordinator.unique_id}_{description.key}" + + @property + def native_value(self) -> float | None: + """Return the sensor value.""" + return self.entity_description.value_fn(self.coordinator.data.sensors) diff --git a/homeassistant/components/smlight/strings.json b/homeassistant/components/smlight/strings.json new file mode 100644 index 00000000000..02b9ebcc4e8 --- /dev/null +++ b/homeassistant/components/smlight/strings.json @@ -0,0 +1,49 @@ +{ + "config": { + "step": { + "user": { + "description": "Set up SMLIGHT Zigbee Integration", + "data": { + "host": "[%key:common::config_flow::data::host%]" + }, + "data_description": { + "host": "The hostname or IP address of the SMLIGHT SLZB-06x device" + } + }, + "auth": { + "description": "Please enter the username and password", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + }, + "confirm_discovery": { + "description": "Do you want to set up SMLIGHT at {host}?" 
+ } + }, + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "sensor": { + "zigbee_temperature": { + "name": "Zigbee chip temp" + }, + "core_temperature": { + "name": "Core chip temp" + }, + "fs_usage": { + "name": "Filesystem usage" + }, + "ram_usage": { + "name": "RAM usage" + } + } + } +} diff --git a/homeassistant/components/sms/config_flow.py b/homeassistant/components/sms/config_flow.py index aec9674da9d..d2188a94632 100644 --- a/homeassistant/components/sms/config_flow.py +++ b/homeassistant/components/sms/config_flow.py @@ -1,11 +1,12 @@ """Config flow for SMS integration.""" import logging +from typing import Any import gammu import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_DEVICE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -26,7 +27,7 @@ DATA_SCHEMA = vol.Schema( ) -async def get_imei_from_config(hass: HomeAssistant, data): +async def get_imei_from_config(hass: HomeAssistant, data: dict[str, Any]) -> str: """Validate the user input allows us to connect. Data has the keys from DATA_SCHEMA with values provided by the user. @@ -56,7 +57,9 @@ class SMSFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -79,10 +82,6 @@ class SMSFlowHandler(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input): - """Handle import.""" - return await self.async_step_user(user_input) - class CannotConnect(HomeAssistantError): """Error to indicate we cannot connect.""" diff --git a/homeassistant/components/smtp/icons.json b/homeassistant/components/smtp/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/smtp/icons.json +++ b/homeassistant/components/smtp/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/snapcast/icons.json b/homeassistant/components/snapcast/icons.json index bdc20665282..d6511d768e2 100644 --- a/homeassistant/components/snapcast/icons.json +++ b/homeassistant/components/snapcast/icons.json @@ -1,9 +1,19 @@ { "services": { - "join": "mdi:music-note-plus", - "unjoin": "mdi:music-note-minus", - "snapshot": "mdi:camera", - "restore": "mdi:camera-retake", - "set_latency": "mdi:camera-timer" + "join": { + "service": "mdi:music-note-plus" + }, + "unjoin": { + "service": "mdi:music-note-minus" + }, + "snapshot": { + "service": "mdi:camera" + }, + "restore": { + "service": "mdi:camera-retake" + }, + "set_latency": { + "service": "mdi:camera-timer" + } } } diff --git a/homeassistant/components/snapcast/media_player.py b/homeassistant/components/snapcast/media_player.py index 0918d6465ad..bda411acde3 100644 --- a/homeassistant/components/snapcast/media_player.py +++ b/homeassistant/components/snapcast/media_player.py @@ -42,12 +42,12 
@@ def register_services(): """Register snapcast services.""" platform = entity_platform.async_get_current_platform() - platform.async_register_entity_service(SERVICE_SNAPSHOT, {}, "snapshot") - platform.async_register_entity_service(SERVICE_RESTORE, {}, "async_restore") + platform.async_register_entity_service(SERVICE_SNAPSHOT, None, "snapshot") + platform.async_register_entity_service(SERVICE_RESTORE, None, "async_restore") platform.async_register_entity_service( SERVICE_JOIN, {vol.Required(ATTR_MASTER): cv.entity_id}, handle_async_join ) - platform.async_register_entity_service(SERVICE_UNJOIN, {}, handle_async_unjoin) + platform.async_register_entity_service(SERVICE_UNJOIN, None, handle_async_unjoin) platform.async_register_entity_service( SERVICE_SET_LATENCY, {vol.Required(ATTR_LATENCY): cv.positive_int}, diff --git a/homeassistant/components/snips/icons.json b/homeassistant/components/snips/icons.json index 0d465465fe4..9c86a7ad5b3 100644 --- a/homeassistant/components/snips/icons.json +++ b/homeassistant/components/snips/icons.json @@ -1,8 +1,16 @@ { "services": { - "feedback_off": "mdi:message-alert", - "feedback_on": "mdi:message-alert", - "say": "mdi:chat", - "say_action": "mdi:account-voice" + "feedback_off": { + "service": "mdi:message-alert" + }, + "feedback_on": { + "service": "mdi:message-alert" + }, + "say": { + "service": "mdi:chat" + }, + "say_action": { + "service": "mdi:account-voice" + } } } diff --git a/homeassistant/components/snmp/manifest.json b/homeassistant/components/snmp/manifest.json index d79910c44cd..c3970e1e00a 100644 --- a/homeassistant/components/snmp/manifest.json +++ b/homeassistant/components/snmp/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/snmp", "iot_class": "local_polling", "loggers": ["pyasn1", "pysmi", "pysnmp"], - "requirements": ["pysnmp-lextudio==6.0.11"] + "requirements": ["pysnmp==6.2.5"] } diff --git a/homeassistant/components/snmp/sensor.py b/homeassistant/components/snmp/sensor.py index fb7b87403cb..4586d0600e9 100644 --- a/homeassistant/components/snmp/sensor.py +++ b/homeassistant/components/snmp/sensor.py @@ -174,8 +174,6 @@ async def async_setup_platform( trigger_entity_config[key] = config[key] value_template: Template | None = config.get(CONF_VALUE_TEMPLATE) - if value_template is not None: - value_template.hass = hass data = SnmpData(request_args, baseoid, accept_errors, default_value) async_add_entities([SnmpSensor(hass, data, trigger_entity_config, value_template)]) diff --git a/homeassistant/components/snmp/switch.py b/homeassistant/components/snmp/switch.py index e3ce09cbf48..92e27daed6c 100644 --- a/homeassistant/components/snmp/switch.py +++ b/homeassistant/components/snmp/switch.py @@ -277,6 +277,11 @@ class SnmpSwitch(SwitchEntity): ): self._state = False else: + _LOGGER.warning( + "Invalid payload '%s' received for entity %s, state is unknown", + resrow[-1], + self.entity_id, + ) self._state = None @property diff --git a/homeassistant/components/snooz/icons.json b/homeassistant/components/snooz/icons.json index d9cccfff4ea..be7d2714a20 100644 --- a/homeassistant/components/snooz/icons.json +++ b/homeassistant/components/snooz/icons.json @@ -1,6 +1,10 @@ { "services": { - "transition_on": "mdi:blur", - "transition_off": "mdi:blur-off" + "transition_on": { + "service": "mdi:blur" + }, + "transition_off": { + "service": "mdi:blur-off" + } } } diff --git a/homeassistant/components/solaredge/__init__.py b/homeassistant/components/solaredge/__init__.py index 41448bae98d..206a2499494 
100644 --- a/homeassistant/components/solaredge/__init__.py +++ b/homeassistant/components/solaredge/__init__.py @@ -11,13 +11,10 @@ from homeassistant.const import CONF_API_KEY, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers.aiohttp_client import async_get_clientsession -import homeassistant.helpers.config_validation as cv -from .const import CONF_SITE_ID, DOMAIN, LOGGER +from .const import CONF_SITE_ID, LOGGER from .types import SolarEdgeConfigEntry, SolarEdgeData -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - PLATFORMS = [Platform.SENSOR] diff --git a/homeassistant/components/solarlog/config_flow.py b/homeassistant/components/solarlog/config_flow.py index 7c8401be2b8..4587cb7d886 100644 --- a/homeassistant/components/solarlog/config_flow.py +++ b/homeassistant/components/solarlog/config_flow.py @@ -101,23 +101,6 @@ class SolarLogConfigFlow(ConfigFlow, domain=DOMAIN): errors=self._errors, ) - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry.""" - - user_input = { - CONF_HOST: DEFAULT_HOST, - CONF_NAME: DEFAULT_NAME, - "extended_data": False, - **user_input, - } - - user_input[CONF_HOST] = self._parse_url(user_input[CONF_HOST]) - - if self._host_in_configuration_exists(user_input[CONF_HOST]): - return self.async_abort(reason="already_configured") - - return await self.async_step_user(user_input) - async def async_step_reconfigure( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/solarlog/manifest.json b/homeassistant/components/solarlog/manifest.json index 0878d652f43..0c097b7146d 100644 --- a/homeassistant/components/solarlog/manifest.json +++ b/homeassistant/components/solarlog/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/solarlog", "iot_class": "local_polling", "loggers": ["solarlog_cli"], - "requirements": ["solarlog_cli==0.1.5"] + "requirements": ["solarlog_cli==0.1.6"] } diff --git a/homeassistant/components/soma/config_flow.py b/homeassistant/components/soma/config_flow.py index 773a24d5b44..586567611f7 100644 --- a/homeassistant/components/soma/config_flow.py +++ b/homeassistant/components/soma/config_flow.py @@ -1,12 +1,13 @@ """Config flow for Soma.""" import logging +from typing import Any from api.soma_api import SomaApi from requests import RequestException import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PORT from .const import DOMAIN @@ -24,7 +25,9 @@ class SomaFlowHandler(ConfigFlow, domain=DOMAIN): def __init__(self) -> None: """Instantiate config flow.""" - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" if user_input is None: data = { @@ -64,8 +67,8 @@ class SomaFlowHandler(ConfigFlow, domain=DOMAIN): _LOGGER.error("Connection to SOMA Connect failed with KeyError") return self.async_abort(reason="connection_error") - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle flow start from existing config section.""" if self._async_current_entries(): return self.async_abort(reason="already_setup") - return await 
self.async_step_creation(user_input) + return await self.async_step_creation(import_data) diff --git a/homeassistant/components/somfy_mylink/__init__.py b/homeassistant/components/somfy_mylink/__init__.py index ed9652de55a..89796f5ce46 100644 --- a/homeassistant/components/somfy_mylink/__init__.py +++ b/homeassistant/components/somfy_mylink/__init__.py @@ -8,7 +8,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv from .const import CONF_SYSTEM_ID, DATA_SOMFY_MYLINK, DOMAIN, MYLINK_STATUS, PLATFORMS @@ -16,8 +15,6 @@ UNDO_UPDATE_LISTENER = "undo_update_listener" _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Somfy MyLink from a config entry.""" diff --git a/homeassistant/components/somfy_mylink/config_flow.py b/homeassistant/components/somfy_mylink/config_flow.py index a13f036210d..231f93b0cb7 100644 --- a/homeassistant/components/somfy_mylink/config_flow.py +++ b/homeassistant/components/somfy_mylink/config_flow.py @@ -4,6 +4,7 @@ from __future__ import annotations from copy import deepcopy import logging +from typing import Any from somfy_mylink_synergy import SomfyMyLinkSynergy import voluptuous as vol @@ -61,11 +62,11 @@ class SomfyConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the somfy_mylink flow.""" - self.host = None - self.mac = None - self.ip_address = None + self.host: str | None = None + self.mac: str | None = None + self.ip_address: str | None = None async def async_step_dhcp( self, discovery_info: dhcp.DhcpServiceInfo @@ -82,7 +83,9 @@ class SomfyConfigFlow(ConfigFlow, domain=DOMAIN): self.context["title_placeholders"] = {"ip": self.ip_address, "mac": self.mac} return await self.async_step_user() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} @@ -113,11 +116,6 @@ class SomfyConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, user_input): - """Handle import.""" - self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]}) - return await self.async_step_user(user_input) - @staticmethod @callback def async_get_options_flow( diff --git a/homeassistant/components/songpal/config_flow.py b/homeassistant/components/songpal/config_flow.py index f8a0db3815d..9ccf7a8f19c 100644 --- a/homeassistant/components/songpal/config_flow.py +++ b/homeassistant/components/songpal/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from urllib.parse import urlparse from songpal import Device, SongpalException @@ -36,7 +37,9 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the flow.""" self.conf: SongpalConfig | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" if user_input is None: return self.async_show_form( @@ -123,10 +126,10 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): return await self.async_step_init() - async def 
async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" - name = user_input.get(CONF_NAME) - endpoint = user_input.get(CONF_ENDPOINT) + name = import_data.get(CONF_NAME) + endpoint = import_data.get(CONF_ENDPOINT) parsed_url = urlparse(endpoint) # Try to connect to test the endpoint @@ -143,4 +146,4 @@ class SongpalConfigFlow(ConfigFlow, domain=DOMAIN): self.conf = SongpalConfig(name, parsed_url.hostname, endpoint) - return await self.async_step_init(user_input) + return await self.async_step_init(import_data) diff --git a/homeassistant/components/songpal/icons.json b/homeassistant/components/songpal/icons.json index 1c831fbbd00..6e7cf359c23 100644 --- a/homeassistant/components/songpal/icons.json +++ b/homeassistant/components/songpal/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_sound_setting": "mdi:volume-high" + "set_sound_setting": { + "service": "mdi:volume-high" + } } } diff --git a/homeassistant/components/sonos/icons.json b/homeassistant/components/sonos/icons.json index e2545358ba6..45027d8eabd 100644 --- a/homeassistant/components/sonos/icons.json +++ b/homeassistant/components/sonos/icons.json @@ -44,12 +44,29 @@ } }, "services": { - "snapshot": "mdi:camera", - "restore": "mdi:camera-retake", - "set_sleep_timer": "mdi:alarm", - "clear_sleep_timer": "mdi:alarm-off", - "play_queue": "mdi:play", - "remove_from_queue": "mdi:playlist-remove", - "update_alarm": "mdi:alarm" + "snapshot": { + "service": "mdi:camera" + }, + "restore": { + "service": "mdi:camera-retake" + }, + "set_sleep_timer": { + "service": "mdi:alarm" + }, + "clear_sleep_timer": { + "service": "mdi:alarm-off" + }, + "play_queue": { + "service": "mdi:play" + }, + "remove_from_queue": { + "service": "mdi:playlist-remove" + }, + "update_alarm": { + "service": "mdi:alarm" + }, + "get_queue": { + "service": "mdi:queue-first-in-last-out" + } } } diff --git a/homeassistant/components/sonos/media_player.py b/homeassistant/components/sonos/media_player.py index 4125466bd99..75527bdcb72 100644 --- a/homeassistant/components/sonos/media_player.py +++ b/homeassistant/components/sonos/media_player.py @@ -14,7 +14,7 @@ from soco.core import ( PLAY_MODE_BY_MEANING, PLAY_MODES, ) -from soco.data_structures import DidlFavorite +from soco.data_structures import DidlFavorite, DidlMusicTrack from soco.ms_data_structures import MusicServiceItem from sonos_websocket.exception import SonosWebsocketError import voluptuous as vol @@ -22,8 +22,12 @@ import voluptuous as vol from homeassistant.components import media_source, spotify from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE, + ATTR_MEDIA_ALBUM_NAME, ATTR_MEDIA_ANNOUNCE, + ATTR_MEDIA_ARTIST, + ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_TITLE, BrowseMedia, MediaPlayerDeviceClass, MediaPlayerEnqueue, @@ -38,7 +42,7 @@ from homeassistant.components.plex import PLEX_URI_SCHEME from homeassistant.components.plex.services import process_plex_payload from homeassistant.config_entries import ConfigEntry from homeassistant.const import ATTR_TIME -from homeassistant.core import HomeAssistant, ServiceCall, callback +from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import config_validation as cv, entity_platform, service from homeassistant.helpers.dispatcher import async_dispatcher_connect @@ -88,6 +92,7 @@ 
SERVICE_CLEAR_TIMER = "clear_sleep_timer" SERVICE_UPDATE_ALARM = "update_alarm" SERVICE_PLAY_QUEUE = "play_queue" SERVICE_REMOVE_FROM_QUEUE = "remove_from_queue" +SERVICE_GET_QUEUE = "get_queue" ATTR_SLEEP_TIME = "sleep_time" ATTR_ALARM_ID = "alarm_id" @@ -162,7 +167,9 @@ async def async_setup_entry( "set_sleep_timer", ) - platform.async_register_entity_service(SERVICE_CLEAR_TIMER, {}, "clear_sleep_timer") + platform.async_register_entity_service( + SERVICE_CLEAR_TIMER, None, "clear_sleep_timer" + ) platform.async_register_entity_service( SERVICE_UPDATE_ALARM, @@ -188,6 +195,13 @@ async def async_setup_entry( "remove_from_queue", ) + platform.async_register_entity_service( + SERVICE_GET_QUEUE, + None, + "get_queue", + supports_response=SupportsResponse.ONLY, + ) + class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): """Representation of a Sonos entity.""" @@ -540,8 +554,13 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): raise HomeAssistantError( f"Error when calling Sonos websocket: {exc}" ) from exc - if response["success"]: + if response.get("success"): return + raise HomeAssistantError( + translation_domain=SONOS_DOMAIN, + translation_key="announce_media_error", + translation_placeholders={"media_id": media_id, "response": response}, + ) if spotify.is_spotify_media_type(media_type): media_type = spotify.resolve_spotify_media_type(media_type) @@ -641,11 +660,16 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): playlists = soco.get_sonos_playlists(complete_result=True) playlist = next((p for p in playlists if p.title == media_id), None) if not playlist: - _LOGGER.error('Could not find a Sonos playlist named "%s"', media_id) - else: - soco.clear_queue() - soco.add_to_queue(playlist, timeout=LONG_SERVICE_TIMEOUT) - soco.play_from_queue(0) + raise ServiceValidationError( + translation_domain=SONOS_DOMAIN, + translation_key="invalid_sonos_playlist", + translation_placeholders={ + "name": media_id, + }, + ) + soco.clear_queue() + soco.add_to_queue(playlist, timeout=LONG_SERVICE_TIMEOUT) + soco.play_from_queue(0) elif media_type in PLAYABLE_MEDIA_TYPES: item = media_browser.get_media(self.media.library, media_id, media_type) @@ -729,6 +753,20 @@ class SonosMediaPlayerEntity(SonosEntity, MediaPlayerEntity): """Remove item from the queue.""" self.coordinator.soco.remove_from_queue(queue_position) + @soco_error() + def get_queue(self) -> list[dict]: + """Get the queue.""" + queue: list[DidlMusicTrack] = self.coordinator.soco.get_queue(max_items=0) + return [ + { + ATTR_MEDIA_TITLE: track.title, + ATTR_MEDIA_ALBUM_NAME: track.album, + ATTR_MEDIA_ARTIST: track.creator, + ATTR_MEDIA_CONTENT_ID: track.get_uri(), + } + for track in queue + ] + @property def extra_state_attributes(self) -> dict[str, Any]: """Return entity specific state attributes.""" diff --git a/homeassistant/components/sonos/services.yaml b/homeassistant/components/sonos/services.yaml index f6df83ef6ed..89706428899 100644 --- a/homeassistant/components/sonos/services.yaml +++ b/homeassistant/components/sonos/services.yaml @@ -63,6 +63,12 @@ remove_from_queue: max: 10000 mode: box +get_queue: + target: + entity: + integration: sonos + domain: media_player + update_alarm: target: device: diff --git a/homeassistant/components/sonos/strings.json b/homeassistant/components/sonos/strings.json index 6521302b007..264420ef758 100644 --- a/homeassistant/components/sonos/strings.json +++ b/homeassistant/components/sonos/strings.json @@ -172,11 +172,21 @@ "description": "Enable or disable including 
grouped rooms." } } + }, + "get_queue": { + "name": "Get queue", + "description": "Returns the contents of the queue." } }, "exceptions": { "invalid_favorite": { "message": "Could not find a Sonos favorite: {name}" + }, + "invalid_sonos_playlist": { + "message": "Could not find Sonos playlist: {name}" + }, + "announce_media_error": { + "message": "Announcing clip {media_id} failed {response}" } } } diff --git a/homeassistant/components/soundtouch/config_flow.py b/homeassistant/components/soundtouch/config_flow.py index c8e8ce945db..fea63366db9 100644 --- a/homeassistant/components/soundtouch/config_flow.py +++ b/homeassistant/components/soundtouch/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Bose SoundTouch integration.""" import logging +from typing import Any from libsoundtouch import soundtouch_device from requests import RequestException @@ -21,12 +22,14 @@ class SoundtouchConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize a new SoundTouch config flow.""" - self.host = None + self.host: str | None = None self.name = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} diff --git a/homeassistant/components/soundtouch/icons.json b/homeassistant/components/soundtouch/icons.json index 0dd41f4f881..721a5c77032 100644 --- a/homeassistant/components/soundtouch/icons.json +++ b/homeassistant/components/soundtouch/icons.json @@ -1,8 +1,16 @@ { "services": { - "play_everywhere": "mdi:play", - "create_zone": "mdi:plus", - "add_zone_slave": "mdi:plus", - "remove_zone_slave": "mdi:minus" + "play_everywhere": { + "service": "mdi:play" + }, + "create_zone": { + "service": "mdi:plus" + }, + "add_zone_slave": { + "service": "mdi:plus" + }, + "remove_zone_slave": { + "service": "mdi:minus" + } } } diff --git a/homeassistant/components/spc/__init__.py b/homeassistant/components/spc/__init__.py index bb025d699fc..3d9467f2041 100644 --- a/homeassistant/components/spc/__init__.py +++ b/homeassistant/components/spc/__init__.py @@ -41,7 +41,7 @@ CONFIG_SCHEMA = vol.Schema( async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up the SPC component.""" - async def async_upate_callback(spc_object): + async def async_update_callback(spc_object): if isinstance(spc_object, Area): async_dispatcher_send(hass, SIGNAL_UPDATE_ALARM.format(spc_object.id)) elif isinstance(spc_object, Zone): @@ -54,7 +54,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: session=session, api_url=config[DOMAIN].get(CONF_API_URL), ws_url=config[DOMAIN].get(CONF_WS_URL), - async_callback=async_upate_callback, + async_callback=async_update_callback, ) hass.data[DATA_API] = spc diff --git a/homeassistant/components/spider/config_flow.py b/homeassistant/components/spider/config_flow.py index a678ea73051..0c305adbc39 100644 --- a/homeassistant/components/spider/config_flow.py +++ b/homeassistant/components/spider/config_flow.py @@ -1,11 +1,12 @@ """Config flow for Spider.""" import logging +from typing import Any from spiderpy.spiderapi import SpiderApi, SpiderApiException, UnauthorizedException import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_SCAN_INTERVAL, CONF_USERNAME from .const import DEFAULT_SCAN_INTERVAL, 
DOMAIN @@ -49,7 +50,9 @@ class SpiderConfigFlow(ConfigFlow, domain=DOMAIN): return RESULT_SUCCESS - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -79,6 +82,6 @@ class SpiderConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_data): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import spider config from configuration.yaml.""" return await self.async_step_user(import_data) diff --git a/homeassistant/components/spotify/media_player.py b/homeassistant/components/spotify/media_player.py index bd1bcdfd43e..3653bdb149a 100644 --- a/homeassistant/components/spotify/media_player.py +++ b/homeassistant/components/spotify/media_player.py @@ -31,7 +31,7 @@ from homeassistant.util.dt import utcnow from . import SpotifyConfigEntry from .browse_media import async_browse_media_internal -from .const import DOMAIN, MEDIA_PLAYER_PREFIX, PLAYABLE_MEDIA_TYPES, SPOTIFY_SCOPES +from .const import DOMAIN, MEDIA_PLAYER_PREFIX, PLAYABLE_MEDIA_TYPES from .models import HomeAssistantSpotifyData from .util import fetch_image_url @@ -138,10 +138,6 @@ class SpotifyMediaPlayer(MediaPlayerEntity): entry_type=DeviceEntryType.SERVICE, configuration_url="https://open.spotify.com", ) - - self._scope_ok = set(data.session.token["scope"].split(" ")).issuperset( - SPOTIFY_SCOPES - ) self._currently_playing: dict | None = {} self._playlist: dict | None = None self._restricted_device: bool = False @@ -459,13 +455,6 @@ class SpotifyMediaPlayer(MediaPlayerEntity): ) -> BrowseMedia: """Implement the websocket media browsing helper.""" - if not self._scope_ok: - _LOGGER.debug( - "Spotify scopes are not set correctly, this can impact features such as" - " media browsing" - ) - raise NotImplementedError - return await async_browse_media_internal( self.hass, self.data.client, diff --git a/homeassistant/components/sql/sensor.py b/homeassistant/components/sql/sensor.py index f09f7ae95cf..1d033728c0d 100644 --- a/homeassistant/components/sql/sensor.py +++ b/homeassistant/components/sql/sensor.py @@ -81,9 +81,6 @@ async def async_setup_platform( unique_id: str | None = conf.get(CONF_UNIQUE_ID) db_url: str = resolve_db_url(hass, conf.get(CONF_DB_URL)) - if value_template is not None: - value_template.hass = hass - trigger_entity_config = {CONF_NAME: name} for key in TRIGGER_ENTITY_OPTIONS: if key not in conf: @@ -117,12 +114,10 @@ async def async_setup_entry( value_template: Template | None = None if template is not None: try: - value_template = Template(template) + value_template = Template(template, hass) value_template.ensure_valid() except TemplateError: value_template = None - if value_template is not None: - value_template.hass = hass name_template = Template(name, hass) trigger_entity_config = {CONF_NAME: name_template, CONF_UNIQUE_ID: entry.entry_id} diff --git a/homeassistant/components/squeezebox/config_flow.py b/homeassistant/components/squeezebox/config_flow.py index 9ccac13223b..fe57b12516a 100644 --- a/homeassistant/components/squeezebox/config_flow.py +++ b/homeassistant/components/squeezebox/config_flow.py @@ -3,7 +3,7 @@ import asyncio from http import HTTPStatus import logging -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from pysqueezebox import Server, async_discover 
import voluptuous as vol @@ -102,7 +102,7 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): # update with suggested values from discovery self.data_schema = _base_schema(self.discovery_info) - async def _validate_input(self, data): + async def _validate_input(self, data: dict[str, Any]) -> str | None: """Validate the user input allows us to connect. Retrieve unique id and abort if already configured. @@ -129,7 +129,11 @@ class SqueezeboxConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(status["uuid"]) self._abort_if_unique_id_configured() - async def async_step_user(self, user_input=None): + return None + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" errors = {} if user_input and CONF_HOST in user_input: diff --git a/homeassistant/components/squeezebox/icons.json b/homeassistant/components/squeezebox/icons.json index d58f0d5634d..b11311e1292 100644 --- a/homeassistant/components/squeezebox/icons.json +++ b/homeassistant/components/squeezebox/icons.json @@ -1,8 +1,16 @@ { "services": { - "call_method": "mdi:console", - "call_query": "mdi:database", - "sync": "mdi:sync", - "unsync": "mdi:sync-off" + "call_method": { + "service": "mdi:console" + }, + "call_query": { + "service": "mdi:database" + }, + "sync": { + "service": "mdi:sync" + }, + "unsync": { + "service": "mdi:sync-off" + } } } diff --git a/homeassistant/components/squeezebox/media_player.py b/homeassistant/components/squeezebox/media_player.py index c0a6dad7a47..552b8ed800c 100644 --- a/homeassistant/components/squeezebox/media_player.py +++ b/homeassistant/components/squeezebox/media_player.py @@ -185,7 +185,7 @@ async def async_setup_entry( {vol.Required(ATTR_OTHER_PLAYER): cv.string}, "async_sync", ) - platform.async_register_entity_service(SERVICE_UNSYNC, {}, "async_unsync") + platform.async_register_entity_service(SERVICE_UNSYNC, None, "async_unsync") # Start server discovery task if not already running entry.async_on_unload(async_at_start(hass, start_server_discovery)) @@ -281,10 +281,11 @@ class SqueezeBoxEntity(MediaPlayerEntity): self.hass.data[DOMAIN][KNOWN_PLAYERS].remove(self) @property - def volume_level(self): + def volume_level(self) -> float | None: """Volume level of the media player (0..1).""" if self._player.volume: return int(float(self._player.volume)) / 100.0 + return None @property def is_volume_muted(self): diff --git a/homeassistant/components/starline/config_flow.py b/homeassistant/components/starline/config_flow.py index c13586d0bc3..e27885e6c60 100644 --- a/homeassistant/components/starline/config_flow.py +++ b/homeassistant/components/starline/config_flow.py @@ -2,10 +2,12 @@ from __future__ import annotations +from typing import Any + from starline import StarlineAuth import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback @@ -52,7 +54,9 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): self._auth = StarlineAuth() - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" return await self.async_step_auth_app(user_input) @@ -214,8 +218,7 @@ class StarlineFlowHandler(ConfigFlow, domain=DOMAIN): self._captcha_image = data["captchaImg"] 
return self._async_form_auth_captcha(error) - # pylint: disable=broad-exception-raised - raise Exception(data) + raise Exception(data) # noqa: TRY002, TRY301 except Exception as err: # noqa: BLE001 _LOGGER.error("Error auth user: %s", err) return self._async_form_auth_user(ERROR_AUTH_USER) diff --git a/homeassistant/components/starline/icons.json b/homeassistant/components/starline/icons.json index b98c4178af1..8a4f85a89bf 100644 --- a/homeassistant/components/starline/icons.json +++ b/homeassistant/components/starline/icons.json @@ -72,8 +72,14 @@ } }, "services": { - "update_state": "mdi:reload", - "set_scan_interval": "mdi:timer", - "set_scan_obd_interval": "mdi:timer" + "update_state": { + "service": "mdi:reload" + }, + "set_scan_interval": { + "service": "mdi:timer" + }, + "set_scan_obd_interval": { + "service": "mdi:timer" + } } } diff --git a/homeassistant/components/starline/sensor.py b/homeassistant/components/starline/sensor.py index a53751a3b23..f9bd304c1e1 100644 --- a/homeassistant/components/starline/sensor.py +++ b/homeassistant/components/starline/sensor.py @@ -6,6 +6,7 @@ from homeassistant.components.sensor import ( SensorDeviceClass, SensorEntity, SensorEntityDescription, + SensorStateClass, ) from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( @@ -30,47 +31,57 @@ SENSOR_TYPES: tuple[SensorEntityDescription, ...] = ( translation_key="battery", device_class=SensorDeviceClass.VOLTAGE, native_unit_of_measurement=UnitOfElectricPotential.VOLT, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="balance", translation_key="balance", + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="ctemp", translation_key="interior_temperature", device_class=SensorDeviceClass.TEMPERATURE, native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="etemp", translation_key="engine_temperature", device_class=SensorDeviceClass.TEMPERATURE, native_unit_of_measurement=UnitOfTemperature.CELSIUS, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="gsm_lvl", translation_key="gsm_signal", native_unit_of_measurement=PERCENTAGE, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="fuel", translation_key="fuel", + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="errors", translation_key="errors", + native_unit_of_measurement="errors", entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, ), SensorEntityDescription( key="mileage", translation_key="mileage", native_unit_of_measurement=UnitOfLength.KILOMETERS, device_class=SensorDeviceClass.DISTANCE, + state_class=SensorStateClass.TOTAL_INCREASING, ), SensorEntityDescription( key="gps_count", translation_key="gps_count", native_unit_of_measurement="satellites", + state_class=SensorStateClass.MEASUREMENT, ), ) diff --git a/homeassistant/components/starline/strings.json b/homeassistant/components/starline/strings.json index 6f0c42f0882..14a8ed5a035 100644 --- a/homeassistant/components/starline/strings.json +++ b/homeassistant/components/starline/strings.json @@ -114,9 +114,6 @@ "additional_channel": { "name": "Additional channel" }, - "horn": { - "name": "Horn" - }, "service_mode": { "name": "Service mode" } @@ -127,12 +124,6 @@ } } }, - "issues": { - "deprecated_horn_switch": { - "title": "The Starline Horn switch entity is being removed", - "description": "Using the Horn switch is now deprecated 
and will be removed in a future version of Home Assistant.\n\nPlease adjust any automations or scripts that use Horn switch entity to instead use the Horn button entity." - } - }, "services": { "update_state": { "name": "Update state", diff --git a/homeassistant/components/starline/switch.py b/homeassistant/components/starline/switch.py index 8ca736d2ac5..1b48a72c732 100644 --- a/homeassistant/components/starline/switch.py +++ b/homeassistant/components/starline/switch.py @@ -8,7 +8,6 @@ from homeassistant.components.switch import SwitchEntity, SwitchEntityDescriptio from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, create_issue from .account import StarlineAccount, StarlineDevice from .const import DOMAIN @@ -27,11 +26,6 @@ SWITCH_TYPES: tuple[SwitchEntityDescription, ...] = ( key="out", translation_key="additional_channel", ), - # Deprecated and should be removed in 2024.8 - SwitchEntityDescription( - key="poke", - translation_key="horn", - ), SwitchEntityDescription( key="valet", translation_key="service_mode", @@ -90,16 +84,6 @@ class StarlineSwitch(StarlineEntity, SwitchEntity): def turn_on(self, **kwargs: Any) -> None: """Turn the entity on.""" - if self._key == "poke": - create_issue( - self.hass, - DOMAIN, - "deprecated_horn_switch", - breaks_in_ha_version="2024.8.0", - is_fixable=False, - severity=IssueSeverity.WARNING, - translation_key="deprecated_horn_switch", - ) self._account.api.set_car_state(self._device.device_id, self._key, True) def turn_off(self, **kwargs: Any) -> None: diff --git a/homeassistant/components/statistics/icons.json b/homeassistant/components/statistics/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/statistics/icons.json +++ b/homeassistant/components/statistics/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/stream/fmp4utils.py b/homeassistant/components/stream/fmp4utils.py index 255d75e3b79..5080678e3ca 100644 --- a/homeassistant/components/stream/fmp4utils.py +++ b/homeassistant/components/stream/fmp4utils.py @@ -149,7 +149,8 @@ def get_codec_string(mp4_bytes: bytes) -> str: def find_moov(mp4_io: BufferedIOBase) -> int: """Find location of moov atom in a BufferedIOBase mp4.""" index = 0 - while 1: + # Ruff doesn't understand this loop - the exception is always raised at the end + while 1: # noqa: RET503 mp4_io.seek(index) box_header = mp4_io.read(8) if len(box_header) != 8 or box_header[0:4] == b"\x00\x00\x00\x00": diff --git a/homeassistant/components/stream/manifest.json b/homeassistant/components/stream/manifest.json index 37158aa5fe3..dffd6d65a6e 100644 --- a/homeassistant/components/stream/manifest.json +++ b/homeassistant/components/stream/manifest.json @@ -7,5 +7,5 @@ "integration_type": "system", "iot_class": "local_push", "quality_scale": "internal", - "requirements": ["PyTurboJPEG==1.7.1", "ha-av==10.1.1", "numpy==1.26.0"] + "requirements": ["PyTurboJPEG==1.7.5", "ha-av==10.1.1", "numpy==1.26.0"] } diff --git a/homeassistant/components/streamlabswater/icons.json b/homeassistant/components/streamlabswater/icons.json index aebe224b35e..0cc64fd24cb 100644 --- a/homeassistant/components/streamlabswater/icons.json +++ b/homeassistant/components/streamlabswater/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_away_mode": "mdi:home" + 
"set_away_mode": { + "service": "mdi:home" + } } } diff --git a/homeassistant/components/stt/__init__.py b/homeassistant/components/stt/__init__.py index 676d8b8aa76..f82d6c2ab93 100644 --- a/homeassistant/components/stt/__init__.py +++ b/homeassistant/components/stt/__init__.py @@ -72,9 +72,18 @@ CONFIG_SCHEMA = cv.empty_config_schema(DOMAIN) @callback def async_default_engine(hass: HomeAssistant) -> str | None: """Return the domain or entity id of the default engine.""" - return async_default_provider(hass) or next( - iter(hass.states.async_entity_ids(DOMAIN)), None - ) + component: EntityComponent[SpeechToTextEntity] = hass.data[DOMAIN] + + default_entity_id: str | None = None + + for entity in component.entities: + if entity.platform and entity.platform.platform_name == "cloud": + return entity.entity_id + + if default_entity_id is None: + default_entity_id = entity.entity_id + + return default_entity_id or async_default_provider(hass) @callback @@ -439,6 +448,7 @@ def websocket_list_engines( for engine_id, provider in legacy_providers.items(): provider_info = { "engine_id": engine_id, + "name": provider.name, "supported_languages": provider.supported_languages, } if language: diff --git a/homeassistant/components/subaru/config_flow.py b/homeassistant/components/subaru/config_flow.py index 5ecaf9670d7..3d96a89a14f 100644 --- a/homeassistant/components/subaru/config_flow.py +++ b/homeassistant/components/subaru/config_flow.py @@ -4,7 +4,7 @@ from __future__ import annotations from datetime import datetime import logging -from typing import Any +from typing import TYPE_CHECKING, Any from subarulink import ( Controller as SubaruAPI, @@ -44,10 +44,10 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize config flow.""" - self.config_data = {CONF_PIN: None} - self.controller = None + self.config_data: dict[str, Any] = {CONF_PIN: None} + self.controller: SubaruAPI | None = None async def async_step_user( self, user_input: dict[str, Any] | None = None @@ -66,6 +66,8 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): _LOGGER.error("Unable to communicate with Subaru API: %s", ex.message) return self.async_abort(reason="cannot_connect") else: + if TYPE_CHECKING: + assert self.controller if not self.controller.device_registered: _LOGGER.debug("2FA validation is required") return await self.async_step_two_factor() @@ -137,6 +139,8 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Select contact method and request 2FA code from Subaru.""" error = None + if TYPE_CHECKING: + assert self.controller if user_input: # self.controller.contact_methods is a dict: # {"phone":"555-555-5555", "userName":"my@email.com"} @@ -165,6 +169,8 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Validate received 2FA code with Subaru.""" error = None + if TYPE_CHECKING: + assert self.controller if user_input: try: vol.Match(r"^[0-9]{6}$")(user_input[CONF_VALIDATION_CODE]) @@ -190,6 +196,8 @@ class SubaruConfigFlow(ConfigFlow, domain=DOMAIN): ) -> ConfigFlowResult: """Handle second part of config flow, if required.""" error = None + if TYPE_CHECKING: + assert self.controller if user_input and self.controller.update_saved_pin(user_input[CONF_PIN]): try: vol.Match(r"[0-9]{4}")(user_input[CONF_PIN]) diff --git a/homeassistant/components/subaru/icons.json b/homeassistant/components/subaru/icons.json index f6c3597c3c3..ca8648296c7 100644 --- a/homeassistant/components/subaru/icons.json 
+++ b/homeassistant/components/subaru/icons.json @@ -24,6 +24,8 @@ } }, "services": { - "unlock_specific_door": "mdi:lock-open-variant" + "unlock_specific_door": { + "service": "mdi:lock-open-variant" + } } } diff --git a/homeassistant/components/sun/config_flow.py b/homeassistant/components/sun/config_flow.py index 30b64c60b9f..16c465be8ad 100644 --- a/homeassistant/components/sun/config_flow.py +++ b/homeassistant/components/sun/config_flow.py @@ -23,6 +23,6 @@ class SunConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user") - async def async_step_import(self, user_input: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import from configuration.yaml.""" - return await self.async_step_user(user_input) + return await self.async_step_user(import_data) diff --git a/homeassistant/components/surepetcare/icons.json b/homeassistant/components/surepetcare/icons.json index 1db15b599df..0daad594c48 100644 --- a/homeassistant/components/surepetcare/icons.json +++ b/homeassistant/components/surepetcare/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_lock_state": "mdi:lock", - "set_pet_location": "mdi:dog" + "set_lock_state": { + "service": "mdi:lock" + }, + "set_pet_location": { + "service": "mdi:dog" + } } } diff --git a/homeassistant/components/swiss_public_transport/__init__.py b/homeassistant/components/swiss_public_transport/__init__.py index 83b47d64f17..dc1d0eb236c 100644 --- a/homeassistant/components/swiss_public_transport/__init__.py +++ b/homeassistant/components/swiss_public_transport/__init__.py @@ -11,18 +11,32 @@ from opendata_transport.exceptions import ( from homeassistant import config_entries, core from homeassistant.const import Platform from homeassistant.exceptions import ConfigEntryError, ConfigEntryNotReady -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + config_validation as cv, + device_registry as dr, + entity_registry as er, +) from homeassistant.helpers.aiohttp_client import async_get_clientsession +from homeassistant.helpers.typing import ConfigType from .const import CONF_DESTINATION, CONF_START, CONF_VIA, DOMAIN, PLACEHOLDERS from .coordinator import SwissPublicTransportDataUpdateCoordinator from .helper import unique_id_from_config +from .services import setup_services _LOGGER = logging.getLogger(__name__) PLATFORMS: list[Platform] = [Platform.SENSOR] +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) + + +async def async_setup(hass: core.HomeAssistant, config: ConfigType) -> bool: + """Set up the Swiss public transport component.""" + setup_services(hass) + return True + async def async_setup_entry( hass: core.HomeAssistant, entry: config_entries.ConfigEntry diff --git a/homeassistant/components/swiss_public_transport/const.py b/homeassistant/components/swiss_public_transport/const.py index 32b6427ced5..c02f36f2f25 100644 --- a/homeassistant/components/swiss_public_transport/const.py +++ b/homeassistant/components/swiss_public_transport/const.py @@ -9,12 +9,19 @@ CONF_START: Final = "from" CONF_VIA: Final = "via" DEFAULT_NAME = "Next Destination" +DEFAULT_UPDATE_TIME = 90 MAX_VIA = 5 -SENSOR_CONNECTIONS_COUNT = 3 +CONNECTIONS_COUNT = 3 +CONNECTIONS_MAX = 15 PLACEHOLDERS = { "stationboard_url": "http://transport.opendata.ch/examples/stationboard.html", "opendata_url": "http://transport.opendata.ch", } + +ATTR_CONFIG_ENTRY_ID: Final = "config_entry_id" +ATTR_LIMIT: Final = 
"limit" + +SERVICE_FETCH_CONNECTIONS = "fetch_connections" diff --git a/homeassistant/components/swiss_public_transport/coordinator.py b/homeassistant/components/swiss_public_transport/coordinator.py index ae7e1b2366d..114215520ac 100644 --- a/homeassistant/components/swiss_public_transport/coordinator.py +++ b/homeassistant/components/swiss_public_transport/coordinator.py @@ -7,14 +7,17 @@ import logging from typing import TypedDict from opendata_transport import OpendataTransport -from opendata_transport.exceptions import OpendataTransportError +from opendata_transport.exceptions import ( + OpendataTransportConnectionError, + OpendataTransportError, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed import homeassistant.util.dt as dt_util -from .const import DOMAIN, SENSOR_CONNECTIONS_COUNT +from .const import CONNECTIONS_COUNT, DEFAULT_UPDATE_TIME, DOMAIN _LOGGER = logging.getLogger(__name__) @@ -54,7 +57,7 @@ class SwissPublicTransportDataUpdateCoordinator( hass, _LOGGER, name=DOMAIN, - update_interval=timedelta(seconds=90), + update_interval=timedelta(seconds=DEFAULT_UPDATE_TIME), ) self._opendata = opendata @@ -74,14 +77,21 @@ class SwissPublicTransportDataUpdateCoordinator( return None async def _async_update_data(self) -> list[DataConnection]: + return await self.fetch_connections(limit=CONNECTIONS_COUNT) + + async def fetch_connections(self, limit: int) -> list[DataConnection]: + """Fetch connections using the opendata api.""" + self._opendata.limit = limit try: await self._opendata.async_get_data() + except OpendataTransportConnectionError as e: + _LOGGER.warning("Connection to transport.opendata.ch cannot be established") + raise UpdateFailed from e except OpendataTransportError as e: _LOGGER.warning( "Unable to connect and retrieve data from transport.opendata.ch" ) raise UpdateFailed from e - connections = self._opendata.connections return [ DataConnection( @@ -95,6 +105,6 @@ class SwissPublicTransportDataUpdateCoordinator( remaining_time=str(self.remaining_time(connections[i]["departure"])), delay=connections[i]["delay"], ) - for i in range(SENSOR_CONNECTIONS_COUNT) + for i in range(limit) if len(connections) > i and connections[i] is not None ] diff --git a/homeassistant/components/swiss_public_transport/icons.json b/homeassistant/components/swiss_public_transport/icons.json index 10573b8f5c3..0f868c18c1f 100644 --- a/homeassistant/components/swiss_public_transport/icons.json +++ b/homeassistant/components/swiss_public_transport/icons.json @@ -23,5 +23,10 @@ "default": "mdi:clock-plus" } } + }, + "services": { + "fetch_connections": { + "service": "mdi:bus-clock" + } } } diff --git a/homeassistant/components/swiss_public_transport/sensor.py b/homeassistant/components/swiss_public_transport/sensor.py index 88a6dbecae4..c186b963705 100644 --- a/homeassistant/components/swiss_public_transport/sensor.py +++ b/homeassistant/components/swiss_public_transport/sensor.py @@ -20,7 +20,7 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity -from .const import DOMAIN, SENSOR_CONNECTIONS_COUNT +from .const import CONNECTIONS_COUNT, DOMAIN from .coordinator import DataConnection, SwissPublicTransportDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) @@ -46,7 +46,7 @@ SENSORS: 
tuple[SwissPublicTransportSensorEntityDescription, ...] = ( value_fn=lambda data_connection: data_connection["departure"], index=i, ) - for i in range(SENSOR_CONNECTIONS_COUNT) + for i in range(CONNECTIONS_COUNT) ], SwissPublicTransportSensorEntityDescription( key="duration", diff --git a/homeassistant/components/swiss_public_transport/services.py b/homeassistant/components/swiss_public_transport/services.py new file mode 100644 index 00000000000..e8b7c6bd458 --- /dev/null +++ b/homeassistant/components/swiss_public_transport/services.py @@ -0,0 +1,89 @@ +"""Define services for the Swiss public transport integration.""" + +import voluptuous as vol + +from homeassistant import config_entries +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import ( + HomeAssistant, + ServiceCall, + ServiceResponse, + SupportsResponse, +) +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers.selector import ( + NumberSelector, + NumberSelectorConfig, + NumberSelectorMode, +) +from homeassistant.helpers.update_coordinator import UpdateFailed + +from .const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_LIMIT, + CONNECTIONS_COUNT, + CONNECTIONS_MAX, + DOMAIN, + SERVICE_FETCH_CONNECTIONS, +) + +SERVICE_FETCH_CONNECTIONS_SCHEMA = vol.Schema( + { + vol.Required(ATTR_CONFIG_ENTRY_ID): str, + vol.Optional(ATTR_LIMIT, default=CONNECTIONS_COUNT): NumberSelector( + NumberSelectorConfig( + min=1, max=CONNECTIONS_MAX, mode=NumberSelectorMode.BOX + ) + ), + } +) + + +def async_get_entry( + hass: HomeAssistant, config_entry_id: str +) -> config_entries.ConfigEntry: + """Get the Swiss public transport config entry.""" + if not (entry := hass.config_entries.async_get_entry(config_entry_id)): + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="config_entry_not_found", + translation_placeholders={"target": config_entry_id}, + ) + if entry.state is not ConfigEntryState.LOADED: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="not_loaded", + translation_placeholders={"target": entry.title}, + ) + return entry + + +def setup_services(hass: HomeAssistant) -> None: + """Set up the services for the Swiss public transport integration.""" + + async def async_fetch_connections( + call: ServiceCall, + ) -> ServiceResponse: + """Fetch a set of connections.""" + config_entry = async_get_entry(hass, call.data[ATTR_CONFIG_ENTRY_ID]) + limit = call.data.get(ATTR_LIMIT) or CONNECTIONS_COUNT + coordinator = hass.data[DOMAIN][config_entry.entry_id] + try: + connections = await coordinator.fetch_connections(limit=int(limit)) + except UpdateFailed as e: + raise HomeAssistantError( + translation_domain=DOMAIN, + translation_key="cannot_connect", + translation_placeholders={ + "error": str(e), + }, + ) from e + return {"connections": connections} + + hass.services.async_register( + DOMAIN, + SERVICE_FETCH_CONNECTIONS, + async_fetch_connections, + schema=SERVICE_FETCH_CONNECTIONS_SCHEMA, + supports_response=SupportsResponse.ONLY, + ) diff --git a/homeassistant/components/swiss_public_transport/services.yaml b/homeassistant/components/swiss_public_transport/services.yaml new file mode 100644 index 00000000000..d88dad2ca1f --- /dev/null +++ b/homeassistant/components/swiss_public_transport/services.yaml @@ -0,0 +1,14 @@ +fetch_connections: + fields: + config_entry_id: + required: true + selector: + config_entry: + integration: swiss_public_transport + limit: + example: 3 + selector: + number: + min: 1 + max: 15 
+ step: 1 diff --git a/homeassistant/components/swiss_public_transport/strings.json b/homeassistant/components/swiss_public_transport/strings.json index 4f4bc0522fc..29e73978538 100644 --- a/homeassistant/components/swiss_public_transport/strings.json +++ b/homeassistant/components/swiss_public_transport/strings.json @@ -49,12 +49,37 @@ } } }, + "services": { + "fetch_connections": { + "name": "Fetch Connections", + "description": "Fetch a list of connections from the swiss public transport.", + "fields": { + "config_entry_id": { + "name": "Instance", + "description": "Swiss public transport instance to fetch connections for." + }, + "limit": { + "name": "Limit", + "description": "Number of connections to fetch from [1-15]" + } + } + } + }, "exceptions": { "invalid_data": { "message": "Setup failed for entry {config_title} with invalid data, check at the [stationboard]({stationboard_url}) if your station names are valid.\n{error}" }, "request_timeout": { "message": "Timeout while connecting for entry {config_title}.\n{error}" + }, + "cannot_connect": { + "message": "Cannot connect to server.\n{error}" + }, + "not_loaded": { + "message": "{target} is not loaded." + }, + "config_entry_not_found": { + "message": "Swiss public transport integration instance \"{target}\" not found." } } } diff --git a/homeassistant/components/switch/__init__.py b/homeassistant/components/switch/__init__.py index 55e0a7a767e..43971741e51 100644 --- a/homeassistant/components/switch/__init__.py +++ b/homeassistant/components/switch/__init__.py @@ -79,9 +79,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) await component.async_setup(config) - component.async_register_entity_service(SERVICE_TURN_OFF, {}, "async_turn_off") - component.async_register_entity_service(SERVICE_TURN_ON, {}, "async_turn_on") - component.async_register_entity_service(SERVICE_TOGGLE, {}, "async_toggle") + component.async_register_entity_service(SERVICE_TURN_OFF, None, "async_turn_off") + component.async_register_entity_service(SERVICE_TURN_ON, None, "async_turn_on") + component.async_register_entity_service(SERVICE_TOGGLE, None, "async_toggle") return True diff --git a/homeassistant/components/switch/icons.json b/homeassistant/components/switch/icons.json index fbc1af5a126..10299a2ffc8 100644 --- a/homeassistant/components/switch/icons.json +++ b/homeassistant/components/switch/icons.json @@ -20,8 +20,14 @@ } }, "services": { - "toggle": "mdi:toggle-switch-variant", - "turn_off": "mdi:toggle-switch-variant-off", - "turn_on": "mdi:toggle-switch-variant" + "toggle": { + "service": "mdi:toggle-switch-variant" + }, + "turn_off": { + "service": "mdi:toggle-switch-variant-off" + }, + "turn_on": { + "service": "mdi:toggle-switch-variant" + } } } diff --git a/homeassistant/components/switchbot/manifest.json b/homeassistant/components/switchbot/manifest.json index 0cbbd70a805..f97162184c6 100644 --- a/homeassistant/components/switchbot/manifest.json +++ b/homeassistant/components/switchbot/manifest.json @@ -39,5 +39,5 @@ "documentation": "https://www.home-assistant.io/integrations/switchbot", "iot_class": "local_push", "loggers": ["switchbot"], - "requirements": ["PySwitchbot==0.48.1"] + "requirements": ["PySwitchbot==0.48.2"] } diff --git a/homeassistant/components/switcher_kis/button.py b/homeassistant/components/switcher_kis/button.py index b770c48c11c..2e559ba9f3b 100644 --- a/homeassistant/components/switcher_kis/button.py +++ b/homeassistant/components/switcher_kis/button.py @@ -137,6 +137,7 @@ class 
SwitcherThermostatButtonEntity( try: async with SwitcherType2Api( + self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, self.coordinator.data.device_key, diff --git a/homeassistant/components/switcher_kis/climate.py b/homeassistant/components/switcher_kis/climate.py index e6267e15305..511630251f2 100644 --- a/homeassistant/components/switcher_kis/climate.py +++ b/homeassistant/components/switcher_kis/climate.py @@ -169,6 +169,7 @@ class SwitcherClimateEntity( try: async with SwitcherType2Api( + self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, self.coordinator.data.device_key, diff --git a/homeassistant/components/switcher_kis/cover.py b/homeassistant/components/switcher_kis/cover.py index 258af3e1d5e..19c40d05e63 100644 --- a/homeassistant/components/switcher_kis/cover.py +++ b/homeassistant/components/switcher_kis/cover.py @@ -29,7 +29,7 @@ from .coordinator import SwitcherDataUpdateCoordinator _LOGGER = logging.getLogger(__name__) API_SET_POSITON = "set_position" -API_STOP = "stop" +API_STOP = "stop_shutter" async def async_setup_entry( @@ -98,6 +98,7 @@ class SwitcherCoverEntity( try: async with SwitcherType2Api( + self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, self.coordinator.data.device_key, diff --git a/homeassistant/components/switcher_kis/icons.json b/homeassistant/components/switcher_kis/icons.json index 4d3576f1a99..6ca8e0e8351 100644 --- a/homeassistant/components/switcher_kis/icons.json +++ b/homeassistant/components/switcher_kis/icons.json @@ -24,7 +24,11 @@ } }, "services": { - "set_auto_off": "mdi:progress-clock", - "turn_on_with_timer": "mdi:timer" + "set_auto_off": { + "service": "mdi:progress-clock" + }, + "turn_on_with_timer": { + "service": "mdi:timer" + } } } diff --git a/homeassistant/components/switcher_kis/manifest.json b/homeassistant/components/switcher_kis/manifest.json index 52b218fce9c..75ace60e942 100644 --- a/homeassistant/components/switcher_kis/manifest.json +++ b/homeassistant/components/switcher_kis/manifest.json @@ -7,6 +7,6 @@ "iot_class": "local_push", "loggers": ["aioswitcher"], "quality_scale": "platinum", - "requirements": ["aioswitcher==3.4.3"], + "requirements": ["aioswitcher==4.0.2"], "single_config_entry": true } diff --git a/homeassistant/components/switcher_kis/switch.py b/homeassistant/components/switcher_kis/switch.py index aac5da10ae1..c667a6dd473 100644 --- a/homeassistant/components/switcher_kis/switch.py +++ b/homeassistant/components/switcher_kis/switch.py @@ -117,6 +117,7 @@ class SwitcherBaseSwitchEntity( try: async with SwitcherType1Api( + self.coordinator.data.device_type, self.coordinator.data.ip_address, self.coordinator.data.device_id, self.coordinator.data.device_key, diff --git a/homeassistant/components/syncthing/config_flow.py b/homeassistant/components/syncthing/config_flow.py index 2d7d2ddcc92..86ea52c43a3 100644 --- a/homeassistant/components/syncthing/config_flow.py +++ b/homeassistant/components/syncthing/config_flow.py @@ -1,9 +1,11 @@ """Config flow for syncthing integration.""" +from typing import Any + import aiosyncthing import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_TOKEN, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -42,7 +44,9 @@ class 
SyncThingConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} diff --git a/homeassistant/components/syncthru/config_flow.py b/homeassistant/components/syncthru/config_flow.py index 8cd1c2c7b3b..180ba0d9e34 100644 --- a/homeassistant/components/syncthru/config_flow.py +++ b/homeassistant/components/syncthru/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Samsung SyncThru.""" import re +from typing import Any from urllib.parse import urlparse from pysyncthru import ConnectionMode, SyncThru, SyncThruAPINotSupported @@ -23,7 +24,9 @@ class SyncThruConfigFlow(ConfigFlow, domain=DOMAIN): url: str name: str - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle user initiated flow.""" if user_input is None: return await self._async_show_form(step_id="user") diff --git a/homeassistant/components/synology_dsm/__init__.py b/homeassistant/components/synology_dsm/__init__.py index d42dacca638..3619619782e 100644 --- a/homeassistant/components/synology_dsm/__init__.py +++ b/homeassistant/components/synology_dsm/__init__.py @@ -13,7 +13,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_MAC, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import device_registry as dr from .common import SynoApi, raise_config_entry_auth_error from .const import ( @@ -33,9 +33,6 @@ from .coordinator import ( from .models import SynologyDSMData from .service import async_setup_services -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - - _LOGGER = logging.getLogger(__name__) diff --git a/homeassistant/components/synology_dsm/icons.json b/homeassistant/components/synology_dsm/icons.json index 8e6d2b17f02..3c4d028dc7a 100644 --- a/homeassistant/components/synology_dsm/icons.json +++ b/homeassistant/components/synology_dsm/icons.json @@ -78,7 +78,11 @@ } }, "services": { - "reboot": "mdi:restart", - "shutdown": "mdi:power" + "reboot": { + "service": "mdi:restart" + }, + "shutdown": { + "service": "mdi:power" + } } } diff --git a/homeassistant/components/synology_dsm/manifest.json b/homeassistant/components/synology_dsm/manifest.json index 9d977609d14..5d42188357b 100644 --- a/homeassistant/components/synology_dsm/manifest.json +++ b/homeassistant/components/synology_dsm/manifest.json @@ -7,7 +7,7 @@ "documentation": "https://www.home-assistant.io/integrations/synology_dsm", "iot_class": "local_polling", "loggers": ["synology_dsm"], - "requirements": ["py-synologydsm-api==2.4.5"], + "requirements": ["py-synologydsm-api==2.5.2"], "ssdp": [ { "manufacturer": "Synology", diff --git a/homeassistant/components/synology_dsm/media_source.py b/homeassistant/components/synology_dsm/media_source.py index ace5733c222..d35b262809c 100644 --- a/homeassistant/components/synology_dsm/media_source.py +++ b/homeassistant/components/synology_dsm/media_source.py @@ -46,18 +46,24 @@ class SynologyPhotosMediaSourceIdentifier: self.cache_key = None self.file_name = None self.is_shared = False + self.passphrase = "" - if parts: - self.unique_id = parts[0] - if len(parts) > 1: - self.album_id = parts[1] - if 
len(parts) > 2: - self.cache_key = parts[2] - if len(parts) > 3: - self.file_name = parts[3] - if self.file_name.endswith(SHARED_SUFFIX): - self.is_shared = True - self.file_name = self.file_name.removesuffix(SHARED_SUFFIX) + self.unique_id = parts[0] + + if len(parts) > 1: + album_parts = parts[1].split("_") + self.album_id = album_parts[0] + if len(album_parts) > 1: + self.passphrase = parts[1].replace(f"{self.album_id}_", "") + + if len(parts) > 2: + self.cache_key = parts[2] + + if len(parts) > 3: + self.file_name = parts[3] + if self.file_name.endswith(SHARED_SUFFIX): + self.is_shared = True + self.file_name = self.file_name.removesuffix(SHARED_SUFFIX) class SynologyPhotosMediaSource(MediaSource): @@ -135,7 +141,7 @@ class SynologyPhotosMediaSource(MediaSource): ret.extend( BrowseMediaSource( domain=DOMAIN, - identifier=f"{item.identifier}/{album.album_id}", + identifier=f"{item.identifier}/{album.album_id}_{album.passphrase}", media_class=MediaClass.DIRECTORY, media_content_type=MediaClass.IMAGE, title=album.name, @@ -149,7 +155,7 @@ class SynologyPhotosMediaSource(MediaSource): # Request items of album # Get Items - album = SynoPhotosAlbum(int(identifier.album_id), "", 0) + album = SynoPhotosAlbum(int(identifier.album_id), "", 0, identifier.passphrase) try: album_items = await diskstation.api.photos.get_items_from_album( album, 0, 1000 @@ -170,7 +176,12 @@ class SynologyPhotosMediaSource(MediaSource): ret.append( BrowseMediaSource( domain=DOMAIN, - identifier=f"{identifier.unique_id}/{identifier.album_id}/{album_item.thumbnail_cache_key}/{album_item.file_name}{suffix}", + identifier=( + f"{identifier.unique_id}/" + f"{identifier.album_id}_{identifier.passphrase}/" + f"{album_item.thumbnail_cache_key}/" + f"{album_item.file_name}{suffix}" + ), media_class=MediaClass.IMAGE, media_content_type=mime_type, title=album_item.file_name, @@ -197,7 +208,12 @@ class SynologyPhotosMediaSource(MediaSource): if identifier.is_shared: suffix = SHARED_SUFFIX return PlayMedia( - f"/synology_dsm/{identifier.unique_id}/{identifier.cache_key}/{identifier.file_name}{suffix}", + ( + f"/synology_dsm/{identifier.unique_id}/" + f"{identifier.cache_key}/" + f"{identifier.file_name}{suffix}/" + f"{identifier.passphrase}" + ), mime_type, ) @@ -231,18 +247,24 @@ class SynologyDsmMediaView(http.HomeAssistantView): if not self.hass.data.get(DOMAIN): raise web.HTTPNotFound # location: {cache_key}/{filename} - cache_key, file_name = location.split("/") + cache_key, file_name, passphrase = location.split("/") image_id = int(cache_key.split("_")[0]) + if shared := file_name.endswith(SHARED_SUFFIX): file_name = file_name.removesuffix(SHARED_SUFFIX) + mime_type, _ = mimetypes.guess_type(file_name) if not isinstance(mime_type, str): raise web.HTTPNotFound + diskstation: SynologyDSMData = self.hass.data[DOMAIN][source_dir_id] assert diskstation.api.photos is not None - item = SynoPhotosItem(image_id, "", "", "", cache_key, "", shared) + item = SynoPhotosItem(image_id, "", "", "", cache_key, "xl", shared, passphrase) try: - image = await diskstation.api.photos.download_item(item) + if passphrase: + image = await diskstation.api.photos.download_item_thumbnail(item) + else: + image = await diskstation.api.photos.download_item(item) except SynologyDSMException as exc: raise web.HTTPNotFound from exc return web.Response(body=image, content_type=mime_type) diff --git a/homeassistant/components/system_bridge/icons.json b/homeassistant/components/system_bridge/icons.json index cc648889f0b..a03f77049a3 100644 --- 
a/homeassistant/components/system_bridge/icons.json +++ b/homeassistant/components/system_bridge/icons.json @@ -1,11 +1,25 @@ { "services": { - "get_process_by_id": "mdi:console", - "get_processes_by_name": "mdi:console", - "open_path": "mdi:folder-open", - "open_url": "mdi:web", - "send_keypress": "mdi:keyboard", - "send_text": "mdi:keyboard", - "power_command": "mdi:power" + "get_process_by_id": { + "service": "mdi:console" + }, + "get_processes_by_name": { + "service": "mdi:console" + }, + "open_path": { + "service": "mdi:folder-open" + }, + "open_url": { + "service": "mdi:web" + }, + "send_keypress": { + "service": "mdi:keyboard" + }, + "send_text": { + "service": "mdi:keyboard" + }, + "power_command": { + "service": "mdi:power" + } } } diff --git a/homeassistant/components/system_log/icons.json b/homeassistant/components/system_log/icons.json index 436a6c34808..fe269c5154d 100644 --- a/homeassistant/components/system_log/icons.json +++ b/homeassistant/components/system_log/icons.json @@ -1,6 +1,10 @@ { "services": { - "clear": "mdi:delete", - "write": "mdi:pencil" + "clear": { + "service": "mdi:delete" + }, + "write": { + "service": "mdi:pencil" + } } } diff --git a/homeassistant/components/systemmonitor/__init__.py b/homeassistant/components/systemmonitor/__init__.py index 3fbc9edec2a..4a794a00432 100644 --- a/homeassistant/components/systemmonitor/__init__.py +++ b/homeassistant/components/systemmonitor/__init__.py @@ -73,7 +73,11 @@ async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Migrate old entry.""" - if entry.version == 1: + if entry.version > 1: + # This means the user has downgraded from a future version + return False + + if entry.version == 1 and entry.minor_version < 3: new_options = {**entry.options} if entry.minor_version == 1: # Migration copies process sensors to binary sensors @@ -84,6 +88,14 @@ async def async_migrate_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: entry, options=new_options, version=1, minor_version=2 ) + if entry.minor_version == 2: + new_options = {**entry.options} + if SENSOR_DOMAIN in new_options: + new_options.pop(SENSOR_DOMAIN) + hass.config_entries.async_update_entry( + entry, options=new_options, version=1, minor_version=3 + ) + _LOGGER.debug( "Migration to version %s.%s successful", entry.version, entry.minor_version ) diff --git a/homeassistant/components/systemmonitor/config_flow.py b/homeassistant/components/systemmonitor/config_flow.py index 0ff882d89da..34b28a1d47a 100644 --- a/homeassistant/components/systemmonitor/config_flow.py +++ b/homeassistant/components/systemmonitor/config_flow.py @@ -95,7 +95,7 @@ class SystemMonitorConfigFlowHandler(SchemaConfigFlowHandler, domain=DOMAIN): config_flow = CONFIG_FLOW options_flow = OPTIONS_FLOW VERSION = 1 - MINOR_VERSION = 2 + MINOR_VERSION = 3 def async_config_entry_title(self, options: Mapping[str, Any]) -> str: """Return config entry title.""" diff --git a/homeassistant/components/systemmonitor/repairs.py b/homeassistant/components/systemmonitor/repairs.py deleted file mode 100644 index 10b5d18830d..00000000000 --- a/homeassistant/components/systemmonitor/repairs.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Repairs platform for the System Monitor integration.""" - -from __future__ import annotations - -from typing import Any, cast - -from homeassistant import data_entry_flow -from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow -from 
homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.helpers import entity_registry as er - - -class ProcessFixFlow(RepairsFlow): - """Handler for an issue fixing flow.""" - - def __init__(self, entry: ConfigEntry, processes: list[str]) -> None: - """Create flow.""" - super().__init__() - self.entry = entry - self._processes = processes - - async def async_step_init( - self, user_input: dict[str, str] | None = None - ) -> data_entry_flow.FlowResult: - """Handle the first step of a fix flow.""" - return await self.async_step_migrate_process_sensor() - - async def async_step_migrate_process_sensor( - self, user_input: dict[str, Any] | None = None - ) -> data_entry_flow.FlowResult: - """Handle the options step of a fix flow.""" - if user_input is None: - return self.async_show_form( - step_id="migrate_process_sensor", - description_placeholders={"processes": ", ".join(self._processes)}, - ) - - # Migration has copied the sensors to binary sensors - # Pop the sensors to repair and remove entities - new_options: dict[str, Any] = self.entry.options.copy() - new_options.pop(SENSOR_DOMAIN) - - entity_reg = er.async_get(self.hass) - entries = er.async_entries_for_config_entry(entity_reg, self.entry.entry_id) - for entry in entries: - if entry.entity_id.startswith("sensor.") and entry.unique_id.startswith( - "process_" - ): - entity_reg.async_remove(entry.entity_id) - - self.hass.config_entries.async_update_entry(self.entry, options=new_options) - await self.hass.config_entries.async_reload(self.entry.entry_id) - return self.async_create_entry(data={}) - - -async def async_create_fix_flow( - hass: HomeAssistant, - issue_id: str, - data: dict[str, Any] | None, -) -> RepairsFlow: - """Create flow.""" - entry = None - if data and (entry_id := data.get("entry_id")): - entry_id = cast(str, entry_id) - processes: list[str] = data["processes"] - entry = hass.config_entries.async_get_entry(entry_id) - assert entry - return ProcessFixFlow(entry, processes) - - return ConfirmRepairFlow() diff --git a/homeassistant/components/systemmonitor/sensor.py b/homeassistant/components/systemmonitor/sensor.py index bad4c3be0b5..ef1153f09e8 100644 --- a/homeassistant/components/systemmonitor/sensor.py +++ b/homeassistant/components/systemmonitor/sensor.py @@ -14,8 +14,6 @@ import sys import time from typing import Any, Literal -from psutil import NoSuchProcess - from homeassistant.components.sensor import ( DOMAIN as SENSOR_DOMAIN, SensorDeviceClass, @@ -25,8 +23,6 @@ from homeassistant.components.sensor import ( ) from homeassistant.const import ( PERCENTAGE, - STATE_OFF, - STATE_ON, EntityCategory, UnitOfDataRate, UnitOfInformation, @@ -36,13 +32,12 @@ from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo from homeassistant.helpers.entity_platform import AddEntitiesCallback -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from homeassistant.helpers.typing import StateType from homeassistant.helpers.update_coordinator import CoordinatorEntity from homeassistant.util import slugify from . 
import SystemMonitorConfigEntry -from .const import CONF_PROCESS, DOMAIN, NET_IO_TYPES +from .const import DOMAIN, NET_IO_TYPES from .coordinator import SystemMonitorCoordinator from .util import get_all_disk_mounts, get_all_network_interfaces, read_cpu_temperature @@ -68,24 +63,6 @@ def get_cpu_icon() -> Literal["mdi:cpu-64-bit", "mdi:cpu-32-bit"]: return "mdi:cpu-32-bit" -def get_process(entity: SystemMonitorSensor) -> str: - """Return process.""" - state = STATE_OFF - for proc in entity.coordinator.data.processes: - try: - _LOGGER.debug("process %s for argument %s", proc.name(), entity.argument) - if entity.argument == proc.name(): - state = STATE_ON - break - except NoSuchProcess as err: - _LOGGER.warning( - "Failed to load process with ID: %s, old name: %s", - err.pid, - err.name, - ) - return state - - def get_network(entity: SystemMonitorSensor) -> float | None: """Return network in and out.""" counters = entity.coordinator.data.io_counters @@ -341,15 +318,6 @@ SENSOR_TYPES: dict[str, SysMonitorSensorEntityDescription] = { value_fn=get_throughput, add_to_update=lambda entity: ("io_counters", ""), ), - "process": SysMonitorSensorEntityDescription( - key="process", - translation_key="process", - placeholder="process", - icon=get_cpu_icon(), - mandatory_arg=True, - value_fn=get_process, - add_to_update=lambda entity: ("processes", ""), - ), "processor_use": SysMonitorSensorEntityDescription( key="processor_use", translation_key="processor_use", @@ -551,35 +519,6 @@ async def async_setup_entry( ) continue - if _type == "process": - _entry = entry.options.get(SENSOR_DOMAIN, {}) - for argument in _entry.get(CONF_PROCESS, []): - loaded_resources.add(slugify(f"{_type}_{argument}")) - entities.append( - SystemMonitorSensor( - coordinator, - sensor_description, - entry.entry_id, - argument, - True, - ) - ) - async_create_issue( - hass, - DOMAIN, - "process_sensor", - breaks_in_ha_version="2024.9.0", - is_fixable=True, - is_persistent=False, - severity=IssueSeverity.WARNING, - translation_key="process_sensor", - data={ - "entry_id": entry.entry_id, - "processes": _entry[CONF_PROCESS], - }, - ) - continue - if _type == "processor_use": argument = "" is_enabled = check_legacy_resource(f"{_type}_{argument}", legacy_resources) diff --git a/homeassistant/components/systemmonitor/strings.json b/homeassistant/components/systemmonitor/strings.json index aae2463c9da..dde97918bc3 100644 --- a/homeassistant/components/systemmonitor/strings.json +++ b/homeassistant/components/systemmonitor/strings.json @@ -22,19 +22,6 @@ } } }, - "issues": { - "process_sensor": { - "title": "Process sensors are deprecated and will be removed", - "fix_flow": { - "step": { - "migrate_process_sensor": { - "title": "Process sensors have been setup as binary sensors", - "description": "Process sensors `{processes}` have been created as binary sensors and the sensors will be removed in 2024.9.0.\n\nPlease update all automations, scripts, dashboards or other things depending on these sensors to use the newly created binary sensors instead and press **Submit** to fix this issue." 
- } - } - } - } - }, "entity": { "binary_sensor": { "process": { diff --git a/homeassistant/components/tado/__init__.py b/homeassistant/components/tado/__init__.py index 2c853a0e6e3..084819d8e68 100644 --- a/homeassistant/components/tado/__init__.py +++ b/homeassistant/components/tado/__init__.py @@ -44,7 +44,7 @@ MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=4) SCAN_INTERVAL = timedelta(minutes=5) SCAN_MOBILE_DEVICE_INTERVAL = timedelta(seconds=30) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: diff --git a/homeassistant/components/tado/climate.py b/homeassistant/components/tado/climate.py index 314a2315d0a..60096c25301 100644 --- a/homeassistant/components/tado/climate.py +++ b/homeassistant/components/tado/climate.py @@ -16,6 +16,7 @@ from homeassistant.components.climate import ( SWING_BOTH, SWING_HORIZONTAL, SWING_OFF, + SWING_ON, SWING_VERTICAL, ClimateEntity, ClimateEntityFeature, @@ -47,7 +48,6 @@ from .const import ( HA_TO_TADO_FAN_MODE_MAP, HA_TO_TADO_FAN_MODE_MAP_LEGACY, HA_TO_TADO_HVAC_MODE_MAP, - HA_TO_TADO_SWING_MODE_MAP, ORDERED_KNOWN_TADO_MODES, PRESET_AUTO, SIGNAL_TADO_UPDATE_RECEIVED, @@ -55,17 +55,20 @@ from .const import ( SUPPORT_PRESET_MANUAL, TADO_DEFAULT_MAX_TEMP, TADO_DEFAULT_MIN_TEMP, - TADO_FAN_LEVELS, - TADO_FAN_SPEEDS, + TADO_FANLEVEL_SETTING, + TADO_FANSPEED_SETTING, + TADO_HORIZONTAL_SWING_SETTING, TADO_HVAC_ACTION_TO_HA_HVAC_ACTION, TADO_MODES_WITH_NO_TEMP_SETTING, TADO_SWING_OFF, TADO_SWING_ON, + TADO_SWING_SETTING, TADO_TO_HA_FAN_MODE_MAP, TADO_TO_HA_FAN_MODE_MAP_LEGACY, TADO_TO_HA_HVAC_MODE_MAP, TADO_TO_HA_OFFSET_MAP, TADO_TO_HA_SWING_MODE_MAP, + TADO_VERTICAL_SWING_SETTING, TEMP_OFFSET, TYPE_AIR_CONDITIONING, TYPE_HEATING, @@ -166,29 +169,30 @@ def create_climate_entity( supported_hvac_modes.append(TADO_TO_HA_HVAC_MODE_MAP[mode]) if ( - capabilities[mode].get("swings") - or capabilities[mode].get("verticalSwing") - or capabilities[mode].get("horizontalSwing") + TADO_SWING_SETTING in capabilities[mode] + or TADO_VERTICAL_SWING_SETTING in capabilities[mode] + or TADO_VERTICAL_SWING_SETTING in capabilities[mode] ): support_flags |= ClimateEntityFeature.SWING_MODE supported_swing_modes = [] - if capabilities[mode].get("swings"): + if TADO_SWING_SETTING in capabilities[mode]: supported_swing_modes.append( TADO_TO_HA_SWING_MODE_MAP[TADO_SWING_ON] ) - if capabilities[mode].get("verticalSwing"): + if TADO_VERTICAL_SWING_SETTING in capabilities[mode]: supported_swing_modes.append(SWING_VERTICAL) - if capabilities[mode].get("horizontalSwing"): + if TADO_HORIZONTAL_SWING_SETTING in capabilities[mode]: supported_swing_modes.append(SWING_HORIZONTAL) if ( SWING_HORIZONTAL in supported_swing_modes - and SWING_HORIZONTAL in supported_swing_modes + and SWING_VERTICAL in supported_swing_modes ): supported_swing_modes.append(SWING_BOTH) supported_swing_modes.append(TADO_TO_HA_SWING_MODE_MAP[TADO_SWING_OFF]) - if not capabilities[mode].get("fanSpeeds") and not capabilities[mode].get( - "fanLevel" + if ( + TADO_FANSPEED_SETTING not in capabilities[mode] + and TADO_FANLEVEL_SETTING not in capabilities[mode] ): continue @@ -197,14 +201,15 @@ def create_climate_entity( if supported_fan_modes: continue - if capabilities[mode].get("fanSpeeds"): + if TADO_FANSPEED_SETTING in capabilities[mode]: supported_fan_modes = generate_supported_fanmodes( - TADO_TO_HA_FAN_MODE_MAP_LEGACY, capabilities[mode]["fanSpeeds"] + 
TADO_TO_HA_FAN_MODE_MAP_LEGACY, + capabilities[mode][TADO_FANSPEED_SETTING], ) else: supported_fan_modes = generate_supported_fanmodes( - TADO_TO_HA_FAN_MODE_MAP, capabilities[mode]["fanLevel"] + TADO_TO_HA_FAN_MODE_MAP, capabilities[mode][TADO_FANLEVEL_SETTING] ) cool_temperatures = capabilities[CONST_MODE_COOL]["temperatures"] @@ -316,12 +321,16 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._target_temp: float | None = None self._current_tado_fan_speed = CONST_FAN_OFF + self._current_tado_fan_level = CONST_FAN_OFF self._current_tado_hvac_mode = CONST_MODE_OFF self._current_tado_hvac_action = HVACAction.OFF self._current_tado_swing_mode = TADO_SWING_OFF self._current_tado_vertical_swing = TADO_SWING_OFF self._current_tado_horizontal_swing = TADO_SWING_OFF + capabilities = tado.get_capabilities(zone_id) + self._current_tado_capabilities = capabilities + self._tado_zone_data: PyTado.TadoZone = {} self._tado_geofence_data: dict[str, str] | None = None @@ -382,20 +391,23 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): def fan_mode(self) -> str | None: """Return the fan setting.""" if self._ac_device: - return TADO_TO_HA_FAN_MODE_MAP.get( - self._current_tado_fan_speed, - TADO_TO_HA_FAN_MODE_MAP_LEGACY.get( + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + return TADO_TO_HA_FAN_MODE_MAP_LEGACY.get( self._current_tado_fan_speed, FAN_AUTO - ), - ) + ) + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + return TADO_TO_HA_FAN_MODE_MAP.get( + self._current_tado_fan_level, FAN_AUTO + ) + return FAN_AUTO return None def set_fan_mode(self, fan_mode: str) -> None: """Turn fan on/off.""" - if self._current_tado_fan_speed in TADO_FAN_LEVELS: - self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP[fan_mode]) - else: + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP_LEGACY[fan_mode]) + elif self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._control_hvac(fan_mode=HA_TO_TADO_FAN_MODE_MAP[fan_mode]) @property def preset_mode(self) -> str: @@ -555,24 +567,30 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): swing = None if self._attr_swing_modes is None: return - if ( - SWING_VERTICAL in self._attr_swing_modes - or SWING_HORIZONTAL in self._attr_swing_modes - ): - if swing_mode == SWING_VERTICAL: + if swing_mode == SWING_OFF: + if self._is_valid_setting_for_hvac_mode(TADO_SWING_SETTING): + swing = TADO_SWING_OFF + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + horizontal_swing = TADO_SWING_OFF + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + vertical_swing = TADO_SWING_OFF + if swing_mode == SWING_ON: + swing = TADO_SWING_ON + if swing_mode == SWING_VERTICAL: + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): vertical_swing = TADO_SWING_ON - elif swing_mode == SWING_HORIZONTAL: + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + horizontal_swing = TADO_SWING_OFF + if swing_mode == SWING_HORIZONTAL: + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + vertical_swing = TADO_SWING_OFF + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): horizontal_swing = TADO_SWING_ON - elif swing_mode == SWING_BOTH: + if swing_mode == SWING_BOTH: + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): vertical_swing = TADO_SWING_ON + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): horizontal_swing = TADO_SWING_ON - 
elif swing_mode == SWING_OFF: - if SWING_VERTICAL in self._attr_swing_modes: - vertical_swing = TADO_SWING_OFF - if SWING_HORIZONTAL in self._attr_swing_modes: - horizontal_swing = TADO_SWING_OFF - else: - swing = HA_TO_TADO_SWING_MODE_MAP[swing_mode] self._control_hvac( swing_mode=swing, @@ -596,21 +614,23 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._device_id ][TEMP_OFFSET][offset_key] - self._current_tado_fan_speed = ( - self._tado_zone_data.current_fan_level - if self._tado_zone_data.current_fan_level is not None - else self._tado_zone_data.current_fan_speed - ) - self._current_tado_hvac_mode = self._tado_zone_data.current_hvac_mode self._current_tado_hvac_action = self._tado_zone_data.current_hvac_action - self._current_tado_swing_mode = self._tado_zone_data.current_swing_mode - self._current_tado_vertical_swing = ( - self._tado_zone_data.current_vertical_swing_mode - ) - self._current_tado_horizontal_swing = ( - self._tado_zone_data.current_horizontal_swing_mode - ) + + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._current_tado_fan_level = self._tado_zone_data.current_fan_level + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + self._current_tado_fan_speed = self._tado_zone_data.current_fan_speed + if self._is_valid_setting_for_hvac_mode(TADO_SWING_SETTING): + self._current_tado_swing_mode = self._tado_zone_data.current_swing_mode + if self._is_valid_setting_for_hvac_mode(TADO_VERTICAL_SWING_SETTING): + self._current_tado_vertical_swing = ( + self._tado_zone_data.current_vertical_swing_mode + ) + if self._is_valid_setting_for_hvac_mode(TADO_HORIZONTAL_SWING_SETTING): + self._current_tado_horizontal_swing = ( + self._tado_zone_data.current_horizontal_swing_mode + ) @callback def _async_update_zone_callback(self) -> None: @@ -665,7 +685,10 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): self._target_temp = target_temp if fan_mode: - self._current_tado_fan_speed = fan_mode + if self._is_valid_setting_for_hvac_mode(TADO_FANSPEED_SETTING): + self._current_tado_fan_speed = fan_mode + if self._is_valid_setting_for_hvac_mode(TADO_FANLEVEL_SETTING): + self._current_tado_fan_level = fan_mode if swing_mode: self._current_tado_swing_mode = swing_mode @@ -735,21 +758,32 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): fan_speed = None fan_level = None if self.supported_features & ClimateEntityFeature.FAN_MODE: - if self._current_tado_fan_speed in TADO_FAN_LEVELS: - fan_level = self._current_tado_fan_speed - elif self._current_tado_fan_speed in TADO_FAN_SPEEDS: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_FANSPEED_SETTING, self._current_tado_fan_speed + ): fan_speed = self._current_tado_fan_speed + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_FANLEVEL_SETTING, self._current_tado_fan_level + ): + fan_level = self._current_tado_fan_level + swing = None vertical_swing = None horizontal_swing = None if ( self.supported_features & ClimateEntityFeature.SWING_MODE ) and self._attr_swing_modes is not None: - if SWING_VERTICAL in self._attr_swing_modes: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_VERTICAL_SWING_SETTING, self._current_tado_vertical_swing + ): vertical_swing = self._current_tado_vertical_swing - if SWING_HORIZONTAL in self._attr_swing_modes: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_HORIZONTAL_SWING_SETTING, self._current_tado_horizontal_swing + ): horizontal_swing = self._current_tado_horizontal_swing - if vertical_swing is None 
and horizontal_swing is None: + if self._is_current_setting_supported_by_current_hvac_mode( + TADO_SWING_SETTING, self._current_tado_swing_mode + ): swing = self._current_tado_swing_mode self._tado.set_zone_overlay( @@ -765,3 +799,20 @@ class TadoClimate(TadoZoneEntity, ClimateEntity): vertical_swing=vertical_swing, # api defaults to not sending verticalSwing if swing not None horizontal_swing=horizontal_swing, # api defaults to not sending horizontalSwing if swing not None ) + + def _is_valid_setting_for_hvac_mode(self, setting: str) -> bool: + return ( + self._current_tado_capabilities.get(self._current_tado_hvac_mode, {}).get( + setting + ) + is not None + ) + + def _is_current_setting_supported_by_current_hvac_mode( + self, setting: str, current_state: str | None + ) -> bool: + if self._is_valid_setting_for_hvac_mode(setting): + return current_state in self._current_tado_capabilities[ + self._current_tado_hvac_mode + ].get(setting, []) + return False diff --git a/homeassistant/components/tado/const.py b/homeassistant/components/tado/const.py index 5c6a80c5beb..8033a653325 100644 --- a/homeassistant/components/tado/const.py +++ b/homeassistant/components/tado/const.py @@ -234,3 +234,10 @@ CONF_READING = "reading" ATTR_MESSAGE = "message" WATER_HEATER_FALLBACK_REPAIR = "water_heater_fallback" + +TADO_SWING_SETTING = "swings" +TADO_FANSPEED_SETTING = "fanSpeeds" + +TADO_FANLEVEL_SETTING = "fanLevel" +TADO_VERTICAL_SWING_SETTING = "verticalSwing" +TADO_HORIZONTAL_SWING_SETTING = "horizontalSwing" diff --git a/homeassistant/components/tado/icons.json b/homeassistant/components/tado/icons.json index 83ef6d4b332..c799bef0260 100644 --- a/homeassistant/components/tado/icons.json +++ b/homeassistant/components/tado/icons.json @@ -1,8 +1,16 @@ { "services": { - "set_climate_timer": "mdi:timer", - "set_water_heater_timer": "mdi:timer", - "set_climate_temperature_offset": "mdi:thermometer", - "add_meter_reading": "mdi:counter" + "set_climate_timer": { + "service": "mdi:timer" + }, + "set_water_heater_timer": { + "service": "mdi:timer" + }, + "set_climate_temperature_offset": { + "service": "mdi:thermometer" + }, + "add_meter_reading": { + "service": "mdi:counter" + } } } diff --git a/homeassistant/components/tailwind/config_flow.py b/homeassistant/components/tailwind/config_flow.py index 1cb94625266..13682a3e9c4 100644 --- a/homeassistant/components/tailwind/config_flow.py +++ b/homeassistant/components/tailwind/config_flow.py @@ -144,7 +144,9 @@ class TailwindFlowHandler(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_reauth(self, _: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle initiation of re-authentication with a Tailwind device.""" self.reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] diff --git a/homeassistant/components/tami4/config_flow.py b/homeassistant/components/tami4/config_flow.py index 0fa05bbebe4..72b19470f45 100644 --- a/homeassistant/components/tami4/config_flow.py +++ b/homeassistant/components/tami4/config_flow.py @@ -42,7 +42,7 @@ class Tami4ConfigFlow(ConfigFlow, domain=DOMAIN): if m := _PHONE_MATCHER.match(phone): self.phone = f"+972{m.group('number')}" else: - raise InvalidPhoneNumber + raise InvalidPhoneNumber # noqa: TRY301 await self.hass.async_add_executor_job( Tami4EdgeAPI.request_otp, self.phone ) diff --git a/homeassistant/components/tankerkoenig/__init__.py b/homeassistant/components/tankerkoenig/__init__.py index 
78bced05b36..a500549a648 100644 --- a/homeassistant/components/tankerkoenig/__init__.py +++ b/homeassistant/components/tankerkoenig/__init__.py @@ -4,15 +4,12 @@ from __future__ import annotations from homeassistant.const import Platform from homeassistant.core import HomeAssistant -from homeassistant.helpers import config_validation as cv from .const import DEFAULT_SCAN_INTERVAL, DOMAIN from .coordinator import TankerkoenigConfigEntry, TankerkoenigDataUpdateCoordinator PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry( hass: HomeAssistant, entry: TankerkoenigConfigEntry diff --git a/homeassistant/components/tasmota/sensor.py b/homeassistant/components/tasmota/sensor.py index e87ff88092e..30649fa38bd 100644 --- a/homeassistant/components/tasmota/sensor.py +++ b/homeassistant/components/tasmota/sensor.py @@ -22,7 +22,6 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, SIGNAL_STRENGTH_DECIBELS, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, @@ -35,6 +34,7 @@ from homeassistant.const import ( UnitOfMass, UnitOfPower, UnitOfPressure, + UnitOfReactivePower, UnitOfSpeed, UnitOfTemperature, ) @@ -227,7 +227,7 @@ SENSOR_UNIT_MAP = { hc.PERCENTAGE: PERCENTAGE, hc.POWER_WATT: UnitOfPower.WATT, hc.PRESSURE_HPA: UnitOfPressure.HPA, - hc.REACTIVE_POWER: POWER_VOLT_AMPERE_REACTIVE, + hc.REACTIVE_POWER: UnitOfReactivePower.VOLT_AMPERE_REACTIVE, hc.SIGNAL_STRENGTH_DECIBELS: SIGNAL_STRENGTH_DECIBELS, hc.SIGNAL_STRENGTH_DECIBELS_MILLIWATT: SIGNAL_STRENGTH_DECIBELS_MILLIWATT, hc.SPEED_KILOMETERS_PER_HOUR: UnitOfSpeed.KILOMETERS_PER_HOUR, diff --git a/homeassistant/components/tcp/common.py b/homeassistant/components/tcp/common.py index d6a7fb28f11..263fc416026 100644 --- a/homeassistant/components/tcp/common.py +++ b/homeassistant/components/tcp/common.py @@ -25,7 +25,6 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import TemplateError import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity -from homeassistant.helpers.template import Template from homeassistant.helpers.typing import ConfigType from .const import ( @@ -63,10 +62,6 @@ class TcpEntity(Entity): def __init__(self, hass: HomeAssistant, config: ConfigType) -> None: """Set all the config values if they exist and get initial state.""" - value_template: Template | None = config.get(CONF_VALUE_TEMPLATE) - if value_template is not None: - value_template.hass = hass - self._hass = hass self._config: TcpSensorConfig = { CONF_NAME: config[CONF_NAME], @@ -75,7 +70,7 @@ class TcpEntity(Entity): CONF_TIMEOUT: config[CONF_TIMEOUT], CONF_PAYLOAD: config[CONF_PAYLOAD], CONF_UNIT_OF_MEASUREMENT: config.get(CONF_UNIT_OF_MEASUREMENT), - CONF_VALUE_TEMPLATE: value_template, + CONF_VALUE_TEMPLATE: config.get(CONF_VALUE_TEMPLATE), CONF_VALUE_ON: config.get(CONF_VALUE_ON), CONF_BUFFER_SIZE: config[CONF_BUFFER_SIZE], CONF_SSL: config[CONF_SSL], diff --git a/homeassistant/components/technove/binary_sensor.py b/homeassistant/components/technove/binary_sensor.py index a1ff9d16baf..1ecefe6f85c 100644 --- a/homeassistant/components/technove/binary_sensor.py +++ b/homeassistant/components/technove/binary_sensor.py @@ -4,19 +4,28 @@ from __future__ import annotations from collections.abc import Callable from dataclasses import dataclass +from typing import TYPE_CHECKING from technove import Station as TechnoVEStation from 
homeassistant.components.binary_sensor import ( + DOMAIN as BINARY_SENSOR_DOMAIN, BinarySensorDeviceClass, BinarySensorEntity, BinarySensorEntityDescription, ) from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.issue_registry import ( + IssueSeverity, + async_create_issue, + async_delete_issue, +) from . import TechnoVEConfigEntry +from .const import DOMAIN from .coordinator import TechnoVEDataUpdateCoordinator from .entity import TechnoVEEntity @@ -25,6 +34,7 @@ from .entity import TechnoVEEntity class TechnoVEBinarySensorDescription(BinarySensorEntityDescription): """Describes TechnoVE binary sensor entity.""" + deprecated_version: str | None = None value_fn: Callable[[TechnoVEStation], bool | None] @@ -52,6 +62,9 @@ BINARY_SENSORS = [ entity_category=EntityCategory.DIAGNOSTIC, device_class=BinarySensorDeviceClass.BATTERY_CHARGING, value_fn=lambda station: station.info.is_session_active, + deprecated_version="2025.2.0", + # Disabled by default, as this entity is deprecated + entity_registry_enabled_default=False, ), TechnoVEBinarySensorDescription( key="is_static_ip", @@ -100,3 +113,34 @@ class TechnoVEBinarySensorEntity(TechnoVEEntity, BinarySensorEntity): """Return the state of the sensor.""" return self.entity_description.value_fn(self.coordinator.data) + + async def async_added_to_hass(self) -> None: + """Raise issue when entity is registered and was not disabled.""" + if TYPE_CHECKING: + assert self.unique_id + if entity_id := er.async_get(self.hass).async_get_entity_id( + BINARY_SENSOR_DOMAIN, DOMAIN, self.unique_id + ): + if self.enabled and self.entity_description.deprecated_version: + async_create_issue( + self.hass, + DOMAIN, + f"deprecated_entity_{self.entity_description.key}", + breaks_in_ha_version=self.entity_description.deprecated_version, + is_fixable=False, + severity=IssueSeverity.WARNING, + translation_key=f"deprecated_entity_{self.entity_description.key}", + translation_placeholders={ + "sensor_name": self.name + if isinstance(self.name, str) + else entity_id, + "entity": entity_id, + }, + ) + else: + async_delete_issue( + self.hass, + DOMAIN, + f"deprecated_entity_{self.entity_description.key}", + ) + await super().async_added_to_hass() diff --git a/homeassistant/components/technove/icons.json b/homeassistant/components/technove/icons.json index ff47d3c32bc..52307405ff7 100644 --- a/homeassistant/components/technove/icons.json +++ b/homeassistant/components/technove/icons.json @@ -4,6 +4,11 @@ "ssid": { "default": "mdi:wifi" } + }, + "switch": { + "session_active": { + "default": "mdi:ev-station" + } } } } diff --git a/homeassistant/components/technove/strings.json b/homeassistant/components/technove/strings.json index 8799909d95c..06c93939db8 100644 --- a/homeassistant/components/technove/strings.json +++ b/homeassistant/components/technove/strings.json @@ -77,12 +77,24 @@ "switch": { "auto_charge": { "name": "Auto charge" + }, + "session_active": { + "name": "Charging Enabled" } } }, "exceptions": { "max_current_in_sharing_mode": { "message": "Cannot set the max current when power sharing mode is enabled." + }, + "set_charging_enabled_on_auto_charge": { + "message": "Cannot enable or disable charging when auto-charge is enabled. Try disabling auto-charge first." 
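For context, the exception message added to the TechnoVE strings just above is what the user sees when the new switch code further down rejects a call with a translated ServiceValidationError pointing at that key. A minimal sketch of the pattern, with the guard reduced to a plain boolean and the helper name invented for illustration:

from homeassistant.exceptions import ServiceValidationError

DOMAIN = "technove"

def _require_manual_charge_control(auto_charge: bool) -> None:
    """Sketch: reject a service call while auto-charge is active."""
    if auto_charge:
        # Rendered from exceptions.set_charging_enabled_on_auto_charge in strings.json
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="set_charging_enabled_on_auto_charge",
        )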
+ } + }, + "issues": { + "deprecated_entity_is_session_active": { + "title": "The TechnoVE `{sensor_name}` binary sensor is deprecated", + "description": "`{entity}` is deprecated.\nPlease update your automations and scripts to replace the binary sensor entity with the newly added switch entity.\nWhen you are done migrating you can disable `{entity}`." } } } diff --git a/homeassistant/components/technove/switch.py b/homeassistant/components/technove/switch.py index bb9250215be..a8ad7581da5 100644 --- a/homeassistant/components/technove/switch.py +++ b/homeassistant/components/technove/switch.py @@ -2,30 +2,59 @@ from __future__ import annotations -from collections.abc import Awaitable, Callable +from collections.abc import Callable, Coroutine from dataclasses import dataclass from typing import Any -from technove import Station as TechnoVEStation, TechnoVE +from technove import Station as TechnoVEStation from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription from homeassistant.const import EntityCategory from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . import TechnoVEConfigEntry +from .const import DOMAIN from .coordinator import TechnoVEDataUpdateCoordinator from .entity import TechnoVEEntity from .helpers import technove_exception_handler +async def _set_charging_enabled( + coordinator: TechnoVEDataUpdateCoordinator, enabled: bool +) -> None: + if coordinator.data.info.auto_charge: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="set_charging_enabled_on_auto_charge", + ) + await coordinator.technove.set_charging_enabled(enabled=enabled) + coordinator.data.info.is_session_active = enabled + coordinator.async_set_updated_data(coordinator.data) + + +async def _enable_charging(coordinator: TechnoVEDataUpdateCoordinator) -> None: + await _set_charging_enabled(coordinator, True) + + +async def _disable_charging(coordinator: TechnoVEDataUpdateCoordinator) -> None: + await _set_charging_enabled(coordinator, False) + + +async def _set_auto_charge( + coordinator: TechnoVEDataUpdateCoordinator, enabled: bool +) -> None: + await coordinator.technove.set_auto_charge(enabled=enabled) + + @dataclass(frozen=True, kw_only=True) class TechnoVESwitchDescription(SwitchEntityDescription): """Describes TechnoVE binary sensor entity.""" is_on_fn: Callable[[TechnoVEStation], bool] - turn_on_fn: Callable[[TechnoVE], Awaitable[dict[str, Any]]] - turn_off_fn: Callable[[TechnoVE], Awaitable[dict[str, Any]]] + turn_on_fn: Callable[[TechnoVEDataUpdateCoordinator], Coroutine[Any, Any, None]] + turn_off_fn: Callable[[TechnoVEDataUpdateCoordinator], Coroutine[Any, Any, None]] SWITCHES = [ @@ -34,8 +63,16 @@ SWITCHES = [ translation_key="auto_charge", entity_category=EntityCategory.CONFIG, is_on_fn=lambda station: station.info.auto_charge, - turn_on_fn=lambda technoVE: technoVE.set_auto_charge(enabled=True), - turn_off_fn=lambda technoVE: technoVE.set_auto_charge(enabled=False), + turn_on_fn=lambda coordinator: _set_auto_charge(coordinator, True), + turn_off_fn=lambda coordinator: _set_auto_charge(coordinator, False), + ), + TechnoVESwitchDescription( + key="session_active", + translation_key="session_active", + entity_category=EntityCategory.CONFIG, + is_on_fn=lambda station: station.info.is_session_active, + turn_on_fn=_enable_charging, + turn_off_fn=_disable_charging, ), ] @@ -76,11 +113,9 @@ class 
TechnoVESwitchEntity(TechnoVEEntity, SwitchEntity): @technove_exception_handler async def async_turn_on(self, **kwargs: Any) -> None: """Turn on the TechnoVE switch.""" - await self.entity_description.turn_on_fn(self.coordinator.technove) - await self.coordinator.async_request_refresh() + await self.entity_description.turn_on_fn(self.coordinator) @technove_exception_handler async def async_turn_off(self, **kwargs: Any) -> None: """Turn off the TechnoVE switch.""" - await self.entity_description.turn_off_fn(self.coordinator.technove) - await self.coordinator.async_request_refresh() + await self.entity_description.turn_off_fn(self.coordinator) diff --git a/homeassistant/components/telegram/icons.json b/homeassistant/components/telegram/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/telegram/icons.json +++ b/homeassistant/components/telegram/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/telegram_bot/__init__.py b/homeassistant/components/telegram_bot/__init__.py index fed9021a46e..2d53c744c22 100644 --- a/homeassistant/components/telegram_bot/__init__.py +++ b/homeassistant/components/telegram_bot/__init__.py @@ -41,6 +41,7 @@ from homeassistant.exceptions import TemplateError from homeassistant.helpers import config_validation as cv, issue_registry as ir from homeassistant.helpers.typing import ConfigType from homeassistant.loader import async_get_loaded_integration +from homeassistant.util.ssl import get_default_context, get_default_no_verify_context _LOGGER = logging.getLogger(__name__) @@ -378,7 +379,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: for p_config in domain_config: # Each platform config gets its own bot - bot = initialize_bot(hass, p_config) + bot = await hass.async_add_executor_job(initialize_bot, hass, p_config) p_type: str = p_config[CONF_PLATFORM] platform = platforms[p_type] @@ -408,7 +409,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ): data[attribute] = attribute_templ else: - attribute_templ.hass = hass try: data[attribute] = attribute_templ.async_render( parse_result=False @@ -487,7 +487,7 @@ def initialize_bot(hass: HomeAssistant, p_config: dict) -> Bot: # Auth can actually be stuffed into the URL, but the docs have previously # indicated to put them here. 
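For context on the TechnoVE switch rewrite above: instead of calling the client and then requesting a coordinator refresh, the new helpers write the expected state into the coordinator's cached data and publish it, so the switch flips in the UI immediately. A rough sketch of that optimistic-update pattern, where the client object, its set_enabled call, and the is_enabled field are hypothetical stand-ins:

from typing import Any

from homeassistant.helpers.update_coordinator import DataUpdateCoordinator

async def set_enabled(
    coordinator: DataUpdateCoordinator[Any], client: Any, enabled: bool
) -> None:
    """Sketch: push an optimistic update through a DataUpdateCoordinator."""
    await client.set_enabled(enabled)                     # assumed device API call
    coordinator.data.is_enabled = enabled                 # update the cached snapshot
    coordinator.async_set_updated_data(coordinator.data)  # notify entities right away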
auth = proxy_params.pop("username"), proxy_params.pop("password") - ir.async_create_issue( + ir.create_issue( hass, DOMAIN, "proxy_params_auth_deprecation", @@ -504,7 +504,7 @@ def initialize_bot(hass: HomeAssistant, p_config: dict) -> Bot: learn_more_url="https://github.com/home-assistant/core/pull/112778", ) else: - ir.async_create_issue( + ir.create_issue( hass, DOMAIN, "proxy_params_deprecation", @@ -853,7 +853,11 @@ class TelegramNotificationService: username=kwargs.get(ATTR_USERNAME), password=kwargs.get(ATTR_PASSWORD), authentication=kwargs.get(ATTR_AUTHENTICATION), - verify_ssl=kwargs.get(ATTR_VERIFY_SSL), + verify_ssl=( + get_default_context() + if kwargs.get(ATTR_VERIFY_SSL, False) + else get_default_no_verify_context() + ), ) if file_content: diff --git a/homeassistant/components/telegram_bot/icons.json b/homeassistant/components/telegram_bot/icons.json index f410d387435..0acf20d561a 100644 --- a/homeassistant/components/telegram_bot/icons.json +++ b/homeassistant/components/telegram_bot/icons.json @@ -1,18 +1,46 @@ { "services": { - "send_message": "mdi:send", - "send_photo": "mdi:camera", - "send_sticker": "mdi:sticker", - "send_animation": "mdi:animation", - "send_video": "mdi:video", - "send_voice": "mdi:microphone", - "send_document": "mdi:file-document", - "send_location": "mdi:map-marker", - "send_poll": "mdi:poll", - "edit_message": "mdi:pencil", - "edit_caption": "mdi:pencil", - "edit_replymarkup": "mdi:pencil", - "answer_callback_query": "mdi:check", - "delete_message": "mdi:delete" + "send_message": { + "service": "mdi:send" + }, + "send_photo": { + "service": "mdi:camera" + }, + "send_sticker": { + "service": "mdi:sticker" + }, + "send_animation": { + "service": "mdi:animation" + }, + "send_video": { + "service": "mdi:video" + }, + "send_voice": { + "service": "mdi:microphone" + }, + "send_document": { + "service": "mdi:file-document" + }, + "send_location": { + "service": "mdi:map-marker" + }, + "send_poll": { + "service": "mdi:poll" + }, + "edit_message": { + "service": "mdi:pencil" + }, + "edit_caption": { + "service": "mdi:pencil" + }, + "edit_replymarkup": { + "service": "mdi:pencil" + }, + "answer_callback_query": { + "service": "mdi:check" + }, + "delete_message": { + "service": "mdi:delete" + } } } diff --git a/homeassistant/components/telegram_bot/manifest.json b/homeassistant/components/telegram_bot/manifest.json index c176e6c2cdf..b432c88762f 100644 --- a/homeassistant/components/telegram_bot/manifest.json +++ b/homeassistant/components/telegram_bot/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/telegram_bot", "iot_class": "cloud_push", "loggers": ["telegram"], - "requirements": ["python-telegram-bot[socks]==21.0.1"] + "requirements": ["python-telegram-bot[socks]==21.5"] } diff --git a/homeassistant/components/telegram_bot/polling.py b/homeassistant/components/telegram_bot/polling.py index 45d2ee65b45..bee7f752f6c 100644 --- a/homeassistant/components/telegram_bot/polling.py +++ b/homeassistant/components/telegram_bot/polling.py @@ -25,14 +25,22 @@ async def async_setup_platform(hass, bot, config): async def process_error(update: Update, context: CallbackContext) -> None: """Telegram bot error handler.""" + if context.error: + error_callback(context.error, update) + + +def error_callback(error: Exception, update: Update | None = None) -> None: + """Log the error.""" try: - if context.error: - raise context.error + raise error except (TimedOut, NetworkError, RetryAfter): # Long polling timeout or connection 
problem. Nothing serious. pass except TelegramError: - _LOGGER.error('Update "%s" caused error: "%s"', update, context.error) + if update is not None: + _LOGGER.error('Update "%s" caused error: "%s"', update, error) + else: + _LOGGER.error("%s: %s", error.__class__.__name__, error) class PollBot(BaseTelegramBotEntity): @@ -53,7 +61,7 @@ class PollBot(BaseTelegramBotEntity): """Start the polling task.""" _LOGGER.debug("Starting polling") await self.application.initialize() - await self.application.updater.start_polling() + await self.application.updater.start_polling(error_callback=error_callback) await self.application.start() async def stop_polling(self, event=None): diff --git a/homeassistant/components/tellduslive/config_flow.py b/homeassistant/components/tellduslive/config_flow.py index 6f1318ca61e..6b5e7150d67 100644 --- a/homeassistant/components/tellduslive/config_flow.py +++ b/homeassistant/components/tellduslive/config_flow.py @@ -3,11 +3,12 @@ import asyncio import logging import os +from typing import Any from tellduslive import Session, supports_local_api import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST from homeassistant.util.json import load_json_object @@ -50,7 +51,9 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): ) return self._session.authorize_url - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Let user select host or cloud.""" if self._async_current_entries(): return self.async_abort(reason="already_setup") @@ -122,14 +125,14 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_user() - async def async_step_import(self, user_input): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry.""" if self._async_current_entries(): return self.async_abort(reason="already_setup") - self._scan_interval = user_input[KEY_SCAN_INTERVAL] - if user_input[CONF_HOST] != DOMAIN: - self._hosts.append(user_input[CONF_HOST]) + self._scan_interval = import_data[KEY_SCAN_INTERVAL] + if import_data[CONF_HOST] != DOMAIN: + self._hosts.append(import_data[CONF_HOST]) if not await self.hass.async_add_executor_job( os.path.isfile, self.hass.config.path(TELLDUS_CONFIG_FILE) @@ -141,7 +144,7 @@ class FlowHandler(ConfigFlow, domain=DOMAIN): ) host = next(iter(conf)) - if user_input[CONF_HOST] != host: + if import_data[CONF_HOST] != host: return await self.async_step_user() host = CLOUD_NAME if host == "tellduslive" else host diff --git a/homeassistant/components/tellstick/sensor.py b/homeassistant/components/tellstick/sensor.py index 2c304f259da..1e27511bd84 100644 --- a/homeassistant/components/tellstick/sensor.py +++ b/homeassistant/components/tellstick/sensor.py @@ -29,7 +29,7 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType _LOGGER = logging.getLogger(__name__) -DatatypeDescription = namedtuple( +DatatypeDescription = namedtuple( # noqa: PYI024 "DatatypeDescription", ["name", "unit", "device_class"] ) diff --git a/homeassistant/components/telnet/switch.py b/homeassistant/components/telnet/switch.py index 3b4b9e137d1..82d8905a775 100644 --- a/homeassistant/components/telnet/switch.py +++ b/homeassistant/components/telnet/switch.py @@ -67,11 +67,6 @@ def setup_platform( switches = [] for object_id, device_config in devices.items(): - 
value_template: Template | None = device_config.get(CONF_VALUE_TEMPLATE) - - if value_template is not None: - value_template.hass = hass - switches.append( TelnetSwitch( object_id, @@ -81,7 +76,7 @@ def setup_platform( device_config[CONF_COMMAND_ON], device_config[CONF_COMMAND_OFF], device_config.get(CONF_COMMAND_STATE), - value_template, + device_config.get(CONF_VALUE_TEMPLATE), device_config[CONF_TIMEOUT], ) ) diff --git a/homeassistant/components/template/alarm_control_panel.py b/homeassistant/components/template/alarm_control_panel.py index 2ac91d39858..7c23fdcebcc 100644 --- a/homeassistant/components/template/alarm_control_panel.py +++ b/homeassistant/components/template/alarm_control_panel.py @@ -108,7 +108,7 @@ async def _async_create_entities(hass, config): alarm_control_panels = [] for object_id, entity_config in config[CONF_ALARM_CONTROL_PANELS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) alarm_control_panels.append( diff --git a/homeassistant/components/template/binary_sensor.py b/homeassistant/components/template/binary_sensor.py index 68b3cd6d35a..187c7079f59 100644 --- a/homeassistant/components/template/binary_sensor.py +++ b/homeassistant/components/template/binary_sensor.py @@ -119,17 +119,21 @@ LEGACY_BINARY_SENSOR_SCHEMA = vol.All( ) -def rewrite_legacy_to_modern_conf(cfg: dict[str, dict]) -> list[dict]: +def rewrite_legacy_to_modern_conf( + hass: HomeAssistant, cfg: dict[str, dict] +) -> list[dict]: """Rewrite legacy binary sensor definitions to modern ones.""" sensors = [] for object_id, entity_cfg in cfg.items(): entity_cfg = {**entity_cfg, CONF_OBJECT_ID: object_id} - entity_cfg = rewrite_common_legacy_to_modern_conf(entity_cfg, LEGACY_FIELDS) + entity_cfg = rewrite_common_legacy_to_modern_conf( + hass, entity_cfg, LEGACY_FIELDS + ) if CONF_NAME not in entity_cfg: - entity_cfg[CONF_NAME] = template.Template(object_id) + entity_cfg[CONF_NAME] = template.Template(object_id, hass) sensors.append(entity_cfg) @@ -183,7 +187,7 @@ async def async_setup_platform( _async_create_template_tracking_entities( async_add_entities, hass, - rewrite_legacy_to_modern_conf(config[CONF_SENSORS]), + rewrite_legacy_to_modern_conf(hass, config[CONF_SENSORS]), None, ) return diff --git a/homeassistant/components/template/config.py b/homeassistant/components/template/config.py index 42a57cfc4aa..e2015743a0e 100644 --- a/homeassistant/components/template/config.py +++ b/homeassistant/components/template/config.py @@ -115,7 +115,7 @@ async def async_validate_config(hass: HomeAssistant, config: ConfigType) -> Conf ) definitions = list(cfg[new_key]) if new_key in cfg else [] - definitions.extend(transform(cfg[old_key])) + definitions.extend(transform(hass, cfg[old_key])) cfg = {**cfg, new_key: definitions} config_sections.append(cfg) diff --git a/homeassistant/components/template/config_flow.py b/homeassistant/components/template/config_flow.py index c52a890c1f7..2c12a0d03e9 100644 --- a/homeassistant/components/template/config_flow.py +++ b/homeassistant/components/template/config_flow.py @@ -41,6 +41,16 @@ from homeassistant.helpers.schema_config_entry_flow import ( from .binary_sensor import async_create_preview_binary_sensor from .const import CONF_PRESS, CONF_TURN_OFF, CONF_TURN_ON, DOMAIN +from .number import ( + CONF_MAX, + CONF_MIN, + CONF_SET_VALUE, + CONF_STEP, + DEFAULT_MAX_VALUE, + DEFAULT_MIN_VALUE, + DEFAULT_STEP, + 
async_create_preview_number, +) from .select import CONF_OPTIONS, CONF_SELECT_OPTION from .sensor import async_create_preview_sensor from .switch import async_create_preview_switch @@ -94,6 +104,21 @@ def generate_schema(domain: str, flow_type: str) -> vol.Schema: vol.Optional(CONF_VERIFY_SSL, default=True): selector.BooleanSelector(), } + if domain == Platform.NUMBER: + schema |= { + vol.Required(CONF_STATE): selector.TemplateSelector(), + vol.Required( + CONF_MIN, default=f"{{{{{DEFAULT_MIN_VALUE}}}}}" + ): selector.TemplateSelector(), + vol.Required( + CONF_MAX, default=f"{{{{{DEFAULT_MAX_VALUE}}}}}" + ): selector.TemplateSelector(), + vol.Required( + CONF_STEP, default=f"{{{{{DEFAULT_STEP}}}}}" + ): selector.TemplateSelector(), + vol.Optional(CONF_SET_VALUE): selector.ActionSelector(), + } + if domain == Platform.SELECT: schema |= _SCHEMA_STATE | { vol.Required(CONF_OPTIONS): selector.TemplateSelector(), @@ -238,6 +263,7 @@ TEMPLATE_TYPES = [ "binary_sensor", "button", "image", + "number", "select", "sensor", "switch", @@ -258,6 +284,11 @@ CONFIG_FLOW = { config_schema(Platform.IMAGE), validate_user_input=validate_user_input(Platform.IMAGE), ), + Platform.NUMBER: SchemaFlowFormStep( + config_schema(Platform.NUMBER), + preview="template", + validate_user_input=validate_user_input(Platform.NUMBER), + ), Platform.SELECT: SchemaFlowFormStep( config_schema(Platform.SELECT), validate_user_input=validate_user_input(Platform.SELECT), @@ -290,6 +321,11 @@ OPTIONS_FLOW = { options_schema(Platform.IMAGE), validate_user_input=validate_user_input(Platform.IMAGE), ), + Platform.NUMBER: SchemaFlowFormStep( + options_schema(Platform.NUMBER), + preview="template", + validate_user_input=validate_user_input(Platform.NUMBER), + ), Platform.SELECT: SchemaFlowFormStep( options_schema(Platform.SELECT), validate_user_input=validate_user_input(Platform.SELECT), @@ -311,6 +347,7 @@ CREATE_PREVIEW_ENTITY: dict[ Callable[[HomeAssistant, str, dict[str, Any]], TemplateEntity], ] = { "binary_sensor": async_create_preview_binary_sensor, + "number": async_create_preview_number, "sensor": async_create_preview_sensor, "switch": async_create_preview_switch, } diff --git a/homeassistant/components/template/cover.py b/homeassistant/components/template/cover.py index d50067f6278..2c84387ed64 100644 --- a/homeassistant/components/template/cover.py +++ b/homeassistant/components/template/cover.py @@ -106,7 +106,7 @@ async def _async_create_entities(hass, config): covers = [] for object_id, entity_config in config[CONF_COVERS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) diff --git a/homeassistant/components/template/fan.py b/homeassistant/components/template/fan.py index 20a2159e378..cedd7d0d725 100644 --- a/homeassistant/components/template/fan.py +++ b/homeassistant/components/template/fan.py @@ -94,7 +94,7 @@ async def _async_create_entities(hass, config): fans = [] for object_id, entity_config in config[CONF_FANS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) diff --git a/homeassistant/components/template/icons.json b/homeassistant/components/template/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/template/icons.json +++ b/homeassistant/components/template/icons.json @@ -1,5 +1,7 @@ { 
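A note on the template number schema added to the config flow above: the five-brace f-strings are plain Python brace escaping, producing a literal Jinja template whose body is the numeric default. A quick sketch of what those selector defaults evaluate to, using the constants the number component exports (default values shown in the comments are the stock 0.0/100.0/1.0):

from homeassistant.components.number import (
    DEFAULT_MAX_VALUE,
    DEFAULT_MIN_VALUE,
    DEFAULT_STEP,
)

# Four braces escape to a literal "{{" or "}}"; the fifth opens/closes the field.
print(f"{{{{{DEFAULT_MIN_VALUE}}}}}")  # -> {{0.0}}
print(f"{{{{{DEFAULT_MAX_VALUE}}}}}")  # -> {{100.0}}
print(f"{{{{{DEFAULT_STEP}}}}}")       # -> {{1.0}}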
"services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/template/light.py b/homeassistant/components/template/light.py index ba6b8ce846b..cae6c0cebc1 100644 --- a/homeassistant/components/template/light.py +++ b/homeassistant/components/template/light.py @@ -126,7 +126,7 @@ async def _async_create_entities(hass, config): lights = [] for object_id, entity_config in config[CONF_LIGHTS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) lights.append( diff --git a/homeassistant/components/template/lock.py b/homeassistant/components/template/lock.py index 0fa219fcd9b..5c0b67a23dc 100644 --- a/homeassistant/components/template/lock.py +++ b/homeassistant/components/template/lock.py @@ -59,7 +59,7 @@ PLATFORM_SCHEMA = LOCK_PLATFORM_SCHEMA.extend( async def _async_create_entities(hass, config): """Create the Template lock.""" - config = rewrite_common_legacy_to_modern_conf(config) + config = rewrite_common_legacy_to_modern_conf(hass, config) return [TemplateLock(hass, config, config.get(CONF_UNIQUE_ID))] diff --git a/homeassistant/components/template/number.py b/homeassistant/components/template/number.py index d4004ee9535..499ddc192cc 100644 --- a/homeassistant/components/template/number.py +++ b/homeassistant/components/template/number.py @@ -8,9 +8,6 @@ from typing import Any import voluptuous as vol from homeassistant.components.number import ( - ATTR_MAX, - ATTR_MIN, - ATTR_STEP, ATTR_VALUE, DEFAULT_MAX_VALUE, DEFAULT_MIN_VALUE, @@ -18,9 +15,17 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, NumberEntity, ) -from homeassistant.const import CONF_NAME, CONF_OPTIMISTIC, CONF_STATE, CONF_UNIQUE_ID +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import ( + CONF_DEVICE_ID, + CONF_NAME, + CONF_OPTIMISTIC, + CONF_STATE, + CONF_UNIQUE_ID, +) from homeassistant.core import HomeAssistant, callback -import homeassistant.helpers.config_validation as cv +from homeassistant.helpers import config_validation as cv, selector +from homeassistant.helpers.device import async_device_info_to_link_from_device_id from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.script import Script from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -37,6 +42,9 @@ from .trigger_entity import TriggerEntity _LOGGER = logging.getLogger(__name__) CONF_SET_VALUE = "set_value" +CONF_MIN = "min" +CONF_MAX = "max" +CONF_STEP = "step" DEFAULT_NAME = "Template Number" DEFAULT_OPTIMISTIC = False @@ -47,9 +55,9 @@ NUMBER_SCHEMA = ( vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.template, vol.Required(CONF_STATE): cv.template, vol.Required(CONF_SET_VALUE): cv.SCRIPT_SCHEMA, - vol.Required(ATTR_STEP): cv.template, - vol.Optional(ATTR_MIN, default=DEFAULT_MIN_VALUE): cv.template, - vol.Optional(ATTR_MAX, default=DEFAULT_MAX_VALUE): cv.template, + vol.Required(CONF_STEP): cv.template, + vol.Optional(CONF_MIN, default=DEFAULT_MIN_VALUE): cv.template, + vol.Optional(CONF_MAX, default=DEFAULT_MAX_VALUE): cv.template, vol.Optional(CONF_OPTIMISTIC, default=DEFAULT_OPTIMISTIC): cv.boolean, vol.Optional(CONF_UNIQUE_ID): cv.string, } @@ -57,6 +65,17 @@ NUMBER_SCHEMA = ( .extend(TEMPLATE_ENTITY_AVAILABILITY_SCHEMA.schema) .extend(TEMPLATE_ENTITY_ICON_SCHEMA.schema) ) +NUMBER_CONFIG_SCHEMA = vol.Schema( + { + 
vol.Required(CONF_NAME): cv.template, + vol.Required(CONF_STATE): cv.template, + vol.Required(CONF_STEP): cv.template, + vol.Required(CONF_SET_VALUE): cv.SCRIPT_SCHEMA, + vol.Optional(CONF_MIN): cv.template, + vol.Optional(CONF_MAX): cv.template, + vol.Optional(CONF_DEVICE_ID): selector.DeviceSelector(), + } +) async def _async_create_entities( @@ -99,6 +118,27 @@ async def async_setup_platform( ) +async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Initialize config entry.""" + _options = dict(config_entry.options) + _options.pop("template_type") + validated_config = NUMBER_CONFIG_SCHEMA(_options) + async_add_entities([TemplateNumber(hass, validated_config, config_entry.entry_id)]) + + +@callback +def async_create_preview_number( + hass: HomeAssistant, name: str, config: dict[str, Any] +) -> TemplateNumber: + """Create a preview number.""" + validated_config = NUMBER_CONFIG_SCHEMA(config | {CONF_NAME: name}) + return TemplateNumber(hass, validated_config, None) + + class TemplateNumber(TemplateEntity, NumberEntity): """Representation of a template number.""" @@ -117,13 +157,18 @@ class TemplateNumber(TemplateEntity, NumberEntity): self._command_set_value = Script( hass, config[CONF_SET_VALUE], self._attr_name, DOMAIN ) - self._step_template = config[ATTR_STEP] - self._min_value_template = config[ATTR_MIN] - self._max_value_template = config[ATTR_MAX] - self._attr_assumed_state = self._optimistic = config[CONF_OPTIMISTIC] + + self._step_template = config[CONF_STEP] + self._min_value_template = config[CONF_MIN] + self._max_value_template = config[CONF_MAX] + self._attr_assumed_state = self._optimistic = config.get(CONF_OPTIMISTIC) self._attr_native_step = DEFAULT_STEP self._attr_native_min_value = DEFAULT_MIN_VALUE self._attr_native_max_value = DEFAULT_MAX_VALUE + self._attr_device_info = async_device_info_to_link_from_device_id( + hass, + config.get(CONF_DEVICE_ID), + ) @callback def _async_setup_templates(self) -> None: @@ -161,11 +206,12 @@ class TemplateNumber(TemplateEntity, NumberEntity): if self._optimistic: self._attr_native_value = value self.async_write_ha_state() - await self.async_run_script( - self._command_set_value, - run_variables={ATTR_VALUE: value}, - context=self._context, - ) + if self._command_set_value: + await self.async_run_script( + self._command_set_value, + run_variables={ATTR_VALUE: value}, + context=self._context, + ) class TriggerNumberEntity(TriggerEntity, NumberEntity): @@ -174,9 +220,9 @@ class TriggerNumberEntity(TriggerEntity, NumberEntity): domain = NUMBER_DOMAIN extra_template_keys = ( CONF_STATE, - ATTR_STEP, - ATTR_MIN, - ATTR_MAX, + CONF_STEP, + CONF_MIN, + CONF_MAX, ) def __init__( @@ -203,21 +249,21 @@ class TriggerNumberEntity(TriggerEntity, NumberEntity): def native_min_value(self) -> int: """Return the minimum value.""" return vol.Any(vol.Coerce(float), None)( - self._rendered.get(ATTR_MIN, super().native_min_value) + self._rendered.get(CONF_MIN, super().native_min_value) ) @property def native_max_value(self) -> int: """Return the maximum value.""" return vol.Any(vol.Coerce(float), None)( - self._rendered.get(ATTR_MAX, super().native_max_value) + self._rendered.get(CONF_MAX, super().native_max_value) ) @property def native_step(self) -> int: """Return the increment/decrement step.""" return vol.Any(vol.Coerce(float), None)( - self._rendered.get(ATTR_STEP, super().native_step) + self._rendered.get(CONF_STEP, super().native_step) ) async def 
async_set_native_value(self, value: float) -> None: diff --git a/homeassistant/components/template/sensor.py b/homeassistant/components/template/sensor.py index 70a2d5dd650..ee24407699d 100644 --- a/homeassistant/components/template/sensor.py +++ b/homeassistant/components/template/sensor.py @@ -142,17 +142,21 @@ def extra_validation_checks(val): return val -def rewrite_legacy_to_modern_conf(cfg: dict[str, dict]) -> list[dict]: +def rewrite_legacy_to_modern_conf( + hass: HomeAssistant, cfg: dict[str, dict] +) -> list[dict]: """Rewrite legacy sensor definitions to modern ones.""" sensors = [] for object_id, entity_cfg in cfg.items(): entity_cfg = {**entity_cfg, CONF_OBJECT_ID: object_id} - entity_cfg = rewrite_common_legacy_to_modern_conf(entity_cfg, LEGACY_FIELDS) + entity_cfg = rewrite_common_legacy_to_modern_conf( + hass, entity_cfg, LEGACY_FIELDS + ) if CONF_NAME not in entity_cfg: - entity_cfg[CONF_NAME] = template.Template(object_id) + entity_cfg[CONF_NAME] = template.Template(object_id, hass) sensors.append(entity_cfg) @@ -210,7 +214,7 @@ async def async_setup_platform( _async_create_template_tracking_entities( async_add_entities, hass, - rewrite_legacy_to_modern_conf(config[CONF_SENSORS]), + rewrite_legacy_to_modern_conf(hass, config[CONF_SENSORS]), None, ) return diff --git a/homeassistant/components/template/strings.json b/homeassistant/components/template/strings.json index b1f14af2202..fa365bf3cfd 100644 --- a/homeassistant/components/template/strings.json +++ b/homeassistant/components/template/strings.json @@ -37,6 +37,21 @@ }, "title": "Template image" }, + "number": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "name": "[%key:common::config_flow::data::name%]", + "state": "[%key:component::template::config::step::sensor::data::state%]", + "step": "Step value", + "set_value": "Actions on set value", + "max": "Maximum value", + "min": "Minimum value" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "Template number" + }, "select": { "data": { "device_id": "[%key:common::config_flow::data::device%]", @@ -70,6 +85,7 @@ "binary_sensor": "Template a binary sensor", "button": "Template a button", "image": "Template a image", + "number": "Template a number", "select": "Template a select", "sensor": "Template a sensor", "switch": "Template a switch" @@ -125,6 +141,21 @@ }, "title": "[%key:component::template::config::step::image::title%]" }, + "number": { + "data": { + "device_id": "[%key:common::config_flow::data::device%]", + "name": "[%key:common::config_flow::data::name%]", + "state": "[%key:component::template::config::step::sensor::data::state%]", + "step": "[%key:component::template::config::step::number::data::step%]", + "set_value": "[%key:component::template::config::step::number::data::set_value%]", + "max": "[%key:component::template::config::step::number::data::max%]", + "min": "[%key:component::template::config::step::number::data::min%]" + }, + "data_description": { + "device_id": "[%key:component::template::config::step::sensor::data_description::device_id%]" + }, + "title": "[%key:component::template::config::step::number::title%]" + }, "select": { "data": { "device_id": "[%key:common::config_flow::data::device%]", diff --git a/homeassistant/components/template/switch.py b/homeassistant/components/template/switch.py index fbb35399ef8..9145625f706 100644 --- a/homeassistant/components/template/switch.py +++ 
b/homeassistant/components/template/switch.py @@ -76,7 +76,7 @@ async def _async_create_entities(hass, config): switches = [] for object_id, entity_config in config[CONF_SWITCHES].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) switches.append( diff --git a/homeassistant/components/template/template_entity.py b/homeassistant/components/template/template_entity.py index b5d2ab6fff3..a074f828284 100644 --- a/homeassistant/components/template/template_entity.py +++ b/homeassistant/components/template/template_entity.py @@ -123,7 +123,9 @@ LEGACY_FIELDS = { def rewrite_common_legacy_to_modern_conf( - entity_cfg: dict[str, Any], extra_legacy_fields: dict[str, str] | None = None + hass: HomeAssistant, + entity_cfg: dict[str, Any], + extra_legacy_fields: dict[str, str] | None = None, ) -> dict[str, Any]: """Rewrite legacy config.""" entity_cfg = {**entity_cfg} @@ -138,11 +140,11 @@ def rewrite_common_legacy_to_modern_conf( val = entity_cfg.pop(from_key) if isinstance(val, str): - val = Template(val) + val = Template(val, hass) entity_cfg[to_key] = val if CONF_NAME in entity_cfg and isinstance(entity_cfg[CONF_NAME], str): - entity_cfg[CONF_NAME] = Template(entity_cfg[CONF_NAME]) + entity_cfg[CONF_NAME] = Template(entity_cfg[CONF_NAME], hass) return entity_cfg @@ -310,7 +312,6 @@ class TemplateEntity(Entity): # Try to render the name as it can influence the entity ID self._attr_name = fallback_name if self._friendly_name_template: - self._friendly_name_template.hass = hass with contextlib.suppress(TemplateError): self._attr_name = self._friendly_name_template.async_render( variables=variables, parse_result=False @@ -319,14 +320,12 @@ class TemplateEntity(Entity): # Templates will not render while the entity is unavailable, try to render the # icon and picture templates. if self._entity_picture_template: - self._entity_picture_template.hass = hass with contextlib.suppress(TemplateError): self._attr_entity_picture = self._entity_picture_template.async_render( variables=variables, parse_result=False ) if self._icon_template: - self._icon_template.hass = hass with contextlib.suppress(TemplateError): self._attr_icon = self._icon_template.async_render( variables=variables, parse_result=False @@ -388,8 +387,10 @@ class TemplateEntity(Entity): If True, the attribute will be set to None if the template errors. 
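The recurring change across the template platform files in this diff is that templates are now constructed with hass attached up front instead of having .hass assigned after the fact, which is why rewrite_common_legacy_to_modern_conf and the legacy sensor rewrites all grew a hass parameter. A minimal sketch of the new shape of that rewrite, with a single illustrative field rather than the full LEGACY_FIELDS mapping:

from homeassistant.core import HomeAssistant
from homeassistant.helpers.template import Template

def rewrite_name(hass: HomeAssistant, cfg: dict) -> dict:
    """Sketch: turn a legacy string field into a Template bound to hass."""
    cfg = dict(cfg)
    if isinstance(cfg.get("friendly_name"), str):
        # Previously: Template(value) followed by a separate template.hass = hass
        cfg["name"] = Template(cfg.pop("friendly_name"), hass)
    return cfg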
""" - assert self.hass is not None, "hass cannot be None" - template.hass = self.hass + if self.hass is None: + raise ValueError("hass cannot be None") + if template.hass is None: + raise ValueError("template.hass cannot be None") template_attribute = _TemplateAttribute( self, attribute, template, validator, on_update, none_on_template_error ) diff --git a/homeassistant/components/template/trigger.py b/homeassistant/components/template/trigger.py index 09ad0754634..44ac2d93051 100644 --- a/homeassistant/components/template/trigger.py +++ b/homeassistant/components/template/trigger.py @@ -49,9 +49,7 @@ async def async_attach_trigger( """Listen for state changes based on configuration.""" trigger_data = trigger_info["trigger_data"] value_template: Template = config[CONF_VALUE_TEMPLATE] - value_template.hass = hass time_delta = config.get(CONF_FOR) - template.attach(hass, time_delta) delay_cancel = None job = HassJob(action) armed = False diff --git a/homeassistant/components/template/vacuum.py b/homeassistant/components/template/vacuum.py index 9062f71d818..1d021bcb571 100644 --- a/homeassistant/components/template/vacuum.py +++ b/homeassistant/components/template/vacuum.py @@ -100,7 +100,7 @@ async def _async_create_entities(hass, config): vacuums = [] for object_id, entity_config in config[CONF_VACUUMS].items(): - entity_config = rewrite_common_legacy_to_modern_conf(entity_config) + entity_config = rewrite_common_legacy_to_modern_conf(hass, entity_config) unique_id = entity_config.get(CONF_UNIQUE_ID) vacuums.append( @@ -318,7 +318,7 @@ class TemplateVacuum(TemplateEntity, StateVacuumEntity): try: battery_level_int = int(battery_level) if not 0 <= battery_level_int <= 100: - raise ValueError + raise ValueError # noqa: TRY301 except ValueError: _LOGGER.error( "Received invalid battery level: %s for entity %s. 
Expected: 0-100", diff --git a/homeassistant/components/template/weather.py b/homeassistant/components/template/weather.py index 5c3e4107b2c..ec6d1f08dd3 100644 --- a/homeassistant/components/template/weather.py +++ b/homeassistant/components/template/weather.py @@ -153,7 +153,7 @@ async def async_setup_platform( ) return - config = rewrite_common_legacy_to_modern_conf(config) + config = rewrite_common_legacy_to_modern_conf(hass, config) unique_id = config.get(CONF_UNIQUE_ID) async_add_entities( diff --git a/homeassistant/components/tensorflow/image_processing.py b/homeassistant/components/tensorflow/image_processing.py index 85fe6439f1c..f13c0b24d0b 100644 --- a/homeassistant/components/tensorflow/image_processing.py +++ b/homeassistant/components/tensorflow/image_processing.py @@ -261,8 +261,6 @@ class TensorFlowImageProcessor(ImageProcessingEntity): area_config.get(CONF_RIGHT), ] - template.attach(hass, self._file_out) - self._matches = {} self._total_matches = 0 self._last_image = None diff --git a/homeassistant/components/tesla_fleet/__init__.py b/homeassistant/components/tesla_fleet/__init__.py index 4eac1168674..183e7e753b5 100644 --- a/homeassistant/components/tesla_fleet/__init__.py +++ b/homeassistant/components/tesla_fleet/__init__.py @@ -3,17 +3,19 @@ import asyncio from typing import Final +from aiohttp.client_exceptions import ClientResponseError import jwt from tesla_fleet_api import EnergySpecific, TeslaFleetApi, VehicleSpecific from tesla_fleet_api.const import Scope from tesla_fleet_api.exceptions import ( + InvalidRegion, InvalidToken, + LibraryError, LoginRequired, OAuthExpired, TeslaFleetError, ) -from homeassistant.components.application_credentials import ClientCredential from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ACCESS_TOKEN, CONF_TOKEN, Platform from homeassistant.core import HomeAssistant @@ -27,15 +29,15 @@ from homeassistant.helpers.config_entry_oauth2_flow import ( import homeassistant.helpers.config_validation as cv from homeassistant.helpers.device_registry import DeviceInfo -from .application_credentials import TeslaOAuth2Implementation from .config_flow import OAuth2FlowHandler -from .const import CLIENT_ID, DOMAIN, LOGGER, MODELS, NAME +from .const import DOMAIN, LOGGER, MODELS from .coordinator import ( TeslaFleetEnergySiteInfoCoordinator, TeslaFleetEnergySiteLiveCoordinator, TeslaFleetVehicleDataCoordinator, ) from .models import TeslaFleetData, TeslaFleetEnergyData, TeslaFleetVehicleData +from .oauth import TeslaSystemImplementation PLATFORMS: Final = [Platform.BINARY_SENSOR, Platform.DEVICE_TRACKER, Platform.SENSOR] @@ -56,7 +58,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - OAuth2FlowHandler.async_register_implementation( hass, - TeslaOAuth2Implementation(hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME)), + TeslaSystemImplementation(hass), ) implementation = await async_get_config_entry_implementation(hass, entry) @@ -65,7 +67,12 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - async def _refresh_token() -> str: async with refresh_lock: - await oauth_session.async_ensure_token_valid() + try: + await oauth_session.async_ensure_token_valid() + except ClientResponseError as e: + if e.status == 401: + raise ConfigEntryAuthFailed from e + raise ConfigEntryNotReady from e token: str = oauth_session.token[CONF_ACCESS_TOKEN] return token @@ -76,7 +83,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - 
region=region, charging_scope=False, partner_scope=False, - user_scope=False, energy_scope=Scope.ENERGY_DEVICE_DATA in scopes, vehicle_scope=Scope.VEHICLE_DEVICE_DATA in scopes, refresh_hook=_refresh_token, @@ -85,6 +91,16 @@ async def async_setup_entry(hass: HomeAssistant, entry: TeslaFleetConfigEntry) - products = (await tesla.products())["response"] except (InvalidToken, OAuthExpired, LoginRequired) as e: raise ConfigEntryAuthFailed from e + except InvalidRegion: + try: + LOGGER.info("Region is invalid, trying to find the correct region") + await tesla.find_server() + try: + products = (await tesla.products())["response"] + except TeslaFleetError as e: + raise ConfigEntryNotReady from e + except LibraryError as e: + raise ConfigEntryAuthFailed from e except TeslaFleetError as e: raise ConfigEntryNotReady from e diff --git a/homeassistant/components/tesla_fleet/application_credentials.py b/homeassistant/components/tesla_fleet/application_credentials.py index 32e16cc9244..0ef38567b65 100644 --- a/homeassistant/components/tesla_fleet/application_credentials.py +++ b/homeassistant/components/tesla_fleet/application_credentials.py @@ -1,72 +1,18 @@ """Application Credentials platform the Tesla Fleet integration.""" -import base64 -import hashlib -import secrets -from typing import Any - -from homeassistant.components.application_credentials import ( - AuthImplementation, - AuthorizationServer, - ClientCredential, -) +from homeassistant.components.application_credentials import ClientCredential from homeassistant.core import HomeAssistant from homeassistant.helpers import config_entry_oauth2_flow -from .const import AUTHORIZE_URL, DOMAIN, SCOPES, TOKEN_URL - -AUTH_SERVER = AuthorizationServer(AUTHORIZE_URL, TOKEN_URL) +from .oauth import TeslaUserImplementation async def async_get_auth_implementation( hass: HomeAssistant, auth_domain: str, credential: ClientCredential ) -> config_entry_oauth2_flow.AbstractOAuth2Implementation: """Return auth implementation.""" - return TeslaOAuth2Implementation( + return TeslaUserImplementation( hass, - DOMAIN, + auth_domain, credential, ) - - -class TeslaOAuth2Implementation(AuthImplementation): - """Tesla Fleet API Open Source Oauth2 implementation.""" - - def __init__( - self, hass: HomeAssistant, domain: str, credential: ClientCredential - ) -> None: - """Initialize local auth implementation.""" - self.hass = hass - self._domain = domain - - # Setup PKCE - self.code_verifier = secrets.token_urlsafe(32) - hashed_verifier = hashlib.sha256(self.code_verifier.encode()).digest() - self.code_challenge = ( - base64.urlsafe_b64encode(hashed_verifier).decode().replace("=", "") - ) - super().__init__( - hass, - domain, - credential, - AUTH_SERVER, - ) - - @property - def extra_authorize_data(self) -> dict[str, Any]: - """Extra data that needs to be appended to the authorize url.""" - return { - "scope": " ".join(SCOPES), - "code_challenge": self.code_challenge, # PKCE - } - - async def async_resolve_external_data(self, external_data: Any) -> dict: - """Resolve the authorization code to tokens.""" - return await self._token_request( - { - "grant_type": "authorization_code", - "code": external_data["code"], - "redirect_uri": external_data["state"]["redirect_uri"], - "code_verifier": self.code_verifier, # PKCE - } - ) diff --git a/homeassistant/components/tesla_fleet/config_flow.py b/homeassistant/components/tesla_fleet/config_flow.py index c09ea78177f..64b88792387 100644 --- a/homeassistant/components/tesla_fleet/config_flow.py +++ 
b/homeassistant/components/tesla_fleet/config_flow.py @@ -8,12 +8,11 @@ from typing import Any import jwt -from homeassistant.components.application_credentials import ClientCredential from homeassistant.config_entries import ConfigEntry, ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow -from .application_credentials import TeslaOAuth2Implementation -from .const import CLIENT_ID, DOMAIN, LOGGER, NAME +from .const import DOMAIN, LOGGER +from .oauth import TeslaSystemImplementation class OAuth2FlowHandler( @@ -35,9 +34,7 @@ class OAuth2FlowHandler( """Handle a flow start.""" self.async_register_implementation( self.hass, - TeslaOAuth2Implementation( - self.hass, DOMAIN, ClientCredential(CLIENT_ID, "", NAME) - ), + TeslaSystemImplementation(self.hass), ) return await super().async_step_user() @@ -86,5 +83,8 @@ class OAuth2FlowHandler( ) -> ConfigFlowResult: """Confirm reauth dialog.""" if user_input is None: - return self.async_show_form(step_id="reauth_confirm") + return self.async_show_form( + step_id="reauth_confirm", + description_placeholders={"name": "Tesla Fleet"}, + ) return await self.async_step_user() diff --git a/homeassistant/components/tesla_fleet/const.py b/homeassistant/components/tesla_fleet/const.py index 9d78716a13e..081225c296c 100644 --- a/homeassistant/components/tesla_fleet/const.py +++ b/homeassistant/components/tesla_fleet/const.py @@ -13,7 +13,6 @@ CONF_REFRESH_TOKEN = "refresh_token" LOGGER = logging.getLogger(__package__) -NAME = "Home Assistant" CLIENT_ID = "71b813eb-4a2e-483a-b831-4dec5cb9bf0d" AUTHORIZE_URL = "https://auth.tesla.com/oauth2/v3/authorize" TOKEN_URL = "https://auth.tesla.com/oauth2/v3/token" diff --git a/homeassistant/components/tesla_fleet/manifest.json b/homeassistant/components/tesla_fleet/manifest.json index 2acacab5065..29966b3b49c 100644 --- a/homeassistant/components/tesla_fleet/manifest.json +++ b/homeassistant/components/tesla_fleet/manifest.json @@ -7,5 +7,6 @@ "documentation": "https://www.home-assistant.io/integrations/tesla_fleet", "iot_class": "cloud_polling", "loggers": ["tesla-fleet-api"], + "quality_scale": "gold", "requirements": ["tesla-fleet-api==0.7.3"] } diff --git a/homeassistant/components/tesla_fleet/oauth.py b/homeassistant/components/tesla_fleet/oauth.py new file mode 100644 index 00000000000..00976abf56f --- /dev/null +++ b/homeassistant/components/tesla_fleet/oauth.py @@ -0,0 +1,86 @@ +"""Provide oauth implementations for the Tesla Fleet integration.""" + +import base64 +import hashlib +import secrets +from typing import Any + +from homeassistant.components.application_credentials import ( + AuthImplementation, + AuthorizationServer, + ClientCredential, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import config_entry_oauth2_flow + +from .const import AUTHORIZE_URL, CLIENT_ID, DOMAIN, SCOPES, TOKEN_URL + + +class TeslaSystemImplementation(config_entry_oauth2_flow.LocalOAuth2Implementation): + """Tesla Fleet API open source Oauth2 implementation.""" + + code_verifier: str + code_challenge: str + + def __init__(self, hass: HomeAssistant) -> None: + """Initialize open source Oauth2 implementation.""" + + # Setup PKCE + self.code_verifier = secrets.token_urlsafe(32) + hashed_verifier = hashlib.sha256(self.code_verifier.encode()).digest() + self.code_challenge = ( + base64.urlsafe_b64encode(hashed_verifier).decode().replace("=", "") + ) + super().__init__( + hass, + DOMAIN, + CLIENT_ID, + "", + AUTHORIZE_URL, + TOKEN_URL, + ) + + @property + def name(self) -> str: + 
"""Name of the implementation.""" + return "Built-in open source client ID" + + @property + def extra_authorize_data(self) -> dict[str, Any]: + """Extra data that needs to be appended to the authorize url.""" + return { + "scope": " ".join(SCOPES), + "code_challenge": self.code_challenge, # PKCE + } + + async def async_resolve_external_data(self, external_data: Any) -> dict: + """Resolve the authorization code to tokens.""" + return await self._token_request( + { + "grant_type": "authorization_code", + "code": external_data["code"], + "redirect_uri": external_data["state"]["redirect_uri"], + "code_verifier": self.code_verifier, # PKCE + } + ) + + +class TeslaUserImplementation(AuthImplementation): + """Tesla Fleet API user Oauth2 implementation.""" + + def __init__( + self, hass: HomeAssistant, auth_domain: str, credential: ClientCredential + ) -> None: + """Initialize user Oauth2 implementation.""" + + super().__init__( + hass, + auth_domain, + credential, + AuthorizationServer(AUTHORIZE_URL, TOKEN_URL), + ) + + @property + def extra_authorize_data(self) -> dict[str, Any]: + """Extra data that needs to be appended to the authorize url.""" + return {"scope": " ".join(SCOPES)} diff --git a/homeassistant/components/tesla_fleet/strings.json b/homeassistant/components/tesla_fleet/strings.json index 6e74714ddd5..d4848836689 100644 --- a/homeassistant/components/tesla_fleet/strings.json +++ b/homeassistant/components/tesla_fleet/strings.json @@ -19,7 +19,7 @@ }, "reauth_confirm": { "title": "[%key:common::config_flow::title::reauth%]", - "description": "The Withings integration needs to re-authenticate your account" + "description": "The {name} integration needs to re-authenticate your account" } }, "create_entry": { diff --git a/homeassistant/components/teslemetry/climate.py b/homeassistant/components/teslemetry/climate.py index 5b093b0c6f1..bd4fb0eba53 100644 --- a/homeassistant/components/teslemetry/climate.py +++ b/homeassistant/components/teslemetry/climate.py @@ -168,9 +168,8 @@ class TeslemetryClimateEntity(TeslemetryVehicleEntity, ClimateEntity): ) ) self._attr_preset_mode = preset_mode - if preset_mode == self._attr_preset_modes[0]: - self._attr_hvac_mode = HVACMode.OFF - else: + if preset_mode != self._attr_preset_modes[0]: + # Changing preset mode will also turn on climate self._attr_hvac_mode = HVACMode.HEAT_COOL self.async_write_ha_state() diff --git a/homeassistant/components/teslemetry/icons.json b/homeassistant/components/teslemetry/icons.json index aea98e95e0b..1912d2265f6 100644 --- a/homeassistant/components/teslemetry/icons.json +++ b/homeassistant/components/teslemetry/icons.json @@ -129,7 +129,6 @@ "off": "mdi:car-seat" } }, - "components_customer_preferred_export_rule": { "default": "mdi:transmission-tower", "state": { @@ -259,11 +258,23 @@ } }, "services": { - "navigation_gps_request": "mdi:crosshairs-gps", - "set_scheduled_charging": "mdi:timeline-clock-outline", - "set_scheduled_departure": "mdi:home-clock", - "speed_limit": "mdi:car-speed-limiter", - "valet_mode": "mdi:speedometer-slow", - "time_of_use": "mdi:clock-time-eight-outline" + "navigation_gps_request": { + "service": "mdi:crosshairs-gps" + }, + "set_scheduled_charging": { + "service": "mdi:timeline-clock-outline" + }, + "set_scheduled_departure": { + "service": "mdi:home-clock" + }, + "speed_limit": { + "service": "mdi:car-speed-limiter" + }, + "valet_mode": { + "service": "mdi:speedometer-slow" + }, + "time_of_use": { + "service": "mdi:clock-time-eight-outline" + } } } diff --git 
a/homeassistant/components/tessie/config_flow.py b/homeassistant/components/tessie/config_flow.py index 1cbc070e463..bee518ce95f 100644 --- a/homeassistant/components/tessie/config_flow.py +++ b/homeassistant/components/tessie/config_flow.py @@ -66,7 +66,7 @@ class TessieConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle re-auth.""" self._reauth_entry = self.hass.config_entries.async_get_entry( diff --git a/homeassistant/components/text/icons.json b/homeassistant/components/text/icons.json index 355c439ec33..9448c9a7325 100644 --- a/homeassistant/components/text/icons.json +++ b/homeassistant/components/text/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": "mdi:form-textbox" + "set_value": { + "service": "mdi:form-textbox" + } } } diff --git a/homeassistant/components/thethingsnetwork/config_flow.py b/homeassistant/components/thethingsnetwork/config_flow.py index cbb780e7064..7480e4cb1d9 100644 --- a/homeassistant/components/thethingsnetwork/config_flow.py +++ b/homeassistant/components/thethingsnetwork/config_flow.py @@ -89,7 +89,7 @@ class TTNFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow initialized by a reauth event.""" diff --git a/homeassistant/components/thread/config_flow.py b/homeassistant/components/thread/config_flow.py index b4b6eac0fc8..568b76d4999 100644 --- a/homeassistant/components/thread/config_flow.py +++ b/homeassistant/components/thread/config_flow.py @@ -15,9 +15,7 @@ class ThreadConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_import( - self, import_data: dict[str, str] | None = None - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: None) -> ConfigFlowResult: """Set up by import from async_setup.""" await self._async_handle_discovery_without_unique_id() return self.async_create_entry(title="Thread", data={}) diff --git a/homeassistant/components/tibber/__init__.py b/homeassistant/components/tibber/__init__.py index 51d6f0560f1..ce05b8070f6 100644 --- a/homeassistant/components/tibber/__init__.py +++ b/homeassistant/components/tibber/__init__.py @@ -18,14 +18,14 @@ from homeassistant.helpers import discovery from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.typing import ConfigType -from homeassistant.util import dt as dt_util +from homeassistant.util import dt as dt_util, ssl as ssl_util from .const import DATA_HASS_CONFIG, DOMAIN from .services import async_setup_services PLATFORMS = [Platform.NOTIFY, Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) _LOGGER = logging.getLogger(__name__) @@ -47,6 +47,7 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: access_token=entry.data[CONF_ACCESS_TOKEN], websession=async_get_clientsession(hass), time_zone=dt_util.get_default_time_zone(), + ssl=ssl_util.get_default_context(), ) hass.data[DOMAIN] = tibber_connection @@ -61,13 +62,13 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: except ( TimeoutError, aiohttp.ClientError, - tibber.RetryableHttpException, + 
tibber.RetryableHttpExceptionError, ) as err: raise ConfigEntryNotReady("Unable to connect") from err - except tibber.InvalidLogin as exp: + except tibber.InvalidLoginError as exp: _LOGGER.error("Failed to login. %s", exp) return False - except tibber.FatalHttpException: + except tibber.FatalHttpExceptionError: return False await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) diff --git a/homeassistant/components/tibber/config_flow.py b/homeassistant/components/tibber/config_flow.py index abee3ea50bc..2d4df5107a2 100644 --- a/homeassistant/components/tibber/config_flow.py +++ b/homeassistant/components/tibber/config_flow.py @@ -47,12 +47,12 @@ class TibberConfigFlow(ConfigFlow, domain=DOMAIN): await tibber_connection.update_info() except TimeoutError: errors[CONF_ACCESS_TOKEN] = ERR_TIMEOUT - except tibber.InvalidLogin: + except tibber.InvalidLoginError: errors[CONF_ACCESS_TOKEN] = ERR_TOKEN except ( aiohttp.ClientError, - tibber.RetryableHttpException, - tibber.FatalHttpException, + tibber.RetryableHttpExceptionError, + tibber.FatalHttpExceptionError, ): errors[CONF_ACCESS_TOKEN] = ERR_CLIENT diff --git a/homeassistant/components/tibber/coordinator.py b/homeassistant/components/tibber/coordinator.py index c3746cb9a58..78841f9db91 100644 --- a/homeassistant/components/tibber/coordinator.py +++ b/homeassistant/components/tibber/coordinator.py @@ -49,9 +49,9 @@ class TibberDataCoordinator(DataUpdateCoordinator[None]): await self._tibber_connection.fetch_consumption_data_active_homes() await self._tibber_connection.fetch_production_data_active_homes() await self._insert_statistics() - except tibber.RetryableHttpException as err: + except tibber.RetryableHttpExceptionError as err: raise UpdateFailed(f"Error communicating with API ({err.status})") from err - except tibber.FatalHttpException: + except tibber.FatalHttpExceptionError: # Fatal error. Reload config entry to show correct error. 
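Both the Telegram bot and Tibber changes in this diff hand their client libraries one of Home Assistant's pre-built SSL contexts rather than a bare verify flag, so no new ssl.SSLContext is constructed inside the event loop. A small sketch of that selection, using only the helpers the diff itself imports (the wrapper function name is illustrative):

import ssl

from homeassistant.util.ssl import get_default_context, get_default_no_verify_context

def pick_ssl_context(verify_ssl: bool) -> ssl.SSLContext:
    """Sketch: reuse HA's cached SSL contexts instead of building one per client."""
    return get_default_context() if verify_ssl else get_default_no_verify_context()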
self.hass.async_create_task( self.hass.config_entries.async_reload(self.config_entry.entry_id) diff --git a/homeassistant/components/tibber/icons.json b/homeassistant/components/tibber/icons.json index c6cdd9b0e25..ddc8c735145 100644 --- a/homeassistant/components/tibber/icons.json +++ b/homeassistant/components/tibber/icons.json @@ -1,5 +1,7 @@ { "services": { - "get_prices": "mdi:cash" + "get_prices": { + "service": "mdi:cash" + } } } diff --git a/homeassistant/components/tibber/manifest.json b/homeassistant/components/tibber/manifest.json index 1d8120a4321..527364b6866 100644 --- a/homeassistant/components/tibber/manifest.json +++ b/homeassistant/components/tibber/manifest.json @@ -8,5 +8,5 @@ "iot_class": "cloud_polling", "loggers": ["tibber"], "quality_scale": "silver", - "requirements": ["pyTibber==0.28.2"] + "requirements": ["pyTibber==0.30.1"] } diff --git a/homeassistant/components/tibber/sensor.py b/homeassistant/components/tibber/sensor.py index a9090add49b..09b36f41929 100644 --- a/homeassistant/components/tibber/sensor.py +++ b/homeassistant/components/tibber/sensor.py @@ -383,6 +383,7 @@ class TibberSensorElPrice(TibberSensor): "off_peak_1": None, "peak": None, "off_peak_2": None, + "intraday_price_ranking": None, } self._attr_icon = ICON self._attr_unique_id = self._tibber_home.home_id @@ -411,8 +412,9 @@ class TibberSensorElPrice(TibberSensor): return res = self._tibber_home.current_price_data() - self._attr_native_value, price_level, self._last_updated = res + self._attr_native_value, price_level, self._last_updated, price_rank = res self._attr_extra_state_attributes["price_level"] = price_level + self._attr_extra_state_attributes["intraday_price_ranking"] = price_rank attrs = self._tibber_home.current_attributes() self._attr_extra_state_attributes.update(attrs) diff --git a/homeassistant/components/tile/config_flow.py b/homeassistant/components/tile/config_flow.py index 108d9b1b300..53425958341 100644 --- a/homeassistant/components/tile/config_flow.py +++ b/homeassistant/components/tile/config_flow.py @@ -71,11 +71,9 @@ class TileFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_create_entry(title=self._username, data=data) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" - return await self.async_step_user(import_config) + return await self.async_step_user(import_data) async def async_step_reauth( self, entry_data: Mapping[str, Any] diff --git a/homeassistant/components/time/icons.json b/homeassistant/components/time/icons.json index c08e457e04d..f172c28ae0d 100644 --- a/homeassistant/components/time/icons.json +++ b/homeassistant/components/time/icons.json @@ -5,6 +5,8 @@ } }, "services": { - "set_value": "mdi:clock-edit" + "set_value": { + "service": "mdi:clock-edit" + } } } diff --git a/homeassistant/components/timer/__init__.py b/homeassistant/components/timer/__init__.py index 3f2b4bd7f43..c2057551239 100644 --- a/homeassistant/components/timer/__init__.py +++ b/homeassistant/components/timer/__init__.py @@ -159,9 +159,9 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: {vol.Optional(ATTR_DURATION, default=DEFAULT_DURATION): cv.time_period}, "async_start", ) - component.async_register_entity_service(SERVICE_PAUSE, {}, "async_pause") - component.async_register_entity_service(SERVICE_CANCEL, {}, "async_cancel") - 
component.async_register_entity_service(SERVICE_FINISH, {}, "async_finish") + component.async_register_entity_service(SERVICE_PAUSE, None, "async_pause") + component.async_register_entity_service(SERVICE_CANCEL, None, "async_cancel") + component.async_register_entity_service(SERVICE_FINISH, None, "async_finish") component.async_register_entity_service( SERVICE_CHANGE, {vol.Optional(ATTR_DURATION, default=DEFAULT_DURATION): cv.time_period}, diff --git a/homeassistant/components/timer/icons.json b/homeassistant/components/timer/icons.json index 1e352f7280b..a5319688646 100644 --- a/homeassistant/components/timer/icons.json +++ b/homeassistant/components/timer/icons.json @@ -1,10 +1,22 @@ { "services": { - "start": "mdi:play", - "pause": "mdi:pause", - "cancel": "mdi:cancel", - "finish": "mdi:check", - "change": "mdi:pencil", - "reload": "mdi:reload" + "start": { + "service": "mdi:play" + }, + "pause": { + "service": "mdi:pause" + }, + "cancel": { + "service": "mdi:cancel" + }, + "finish": { + "service": "mdi:check" + }, + "change": { + "service": "mdi:pencil" + }, + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/todo/__init__.py b/homeassistant/components/todo/__init__.py index 5febc9561c4..d35d9d6bbea 100644 --- a/homeassistant/components/todo/__init__.py +++ b/homeassistant/components/todo/__init__.py @@ -183,7 +183,7 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) component.async_register_entity_service( TodoServices.REMOVE_COMPLETED_ITEMS, - {}, + None, _async_remove_completed_items, required_features=[TodoListEntityFeature.DELETE_TODO_ITEM], ) diff --git a/homeassistant/components/todo/icons.json b/homeassistant/components/todo/icons.json index 05c9af74630..4040a0c6b8f 100644 --- a/homeassistant/components/todo/icons.json +++ b/homeassistant/components/todo/icons.json @@ -5,10 +5,20 @@ } }, "services": { - "add_item": "mdi:clipboard-plus", - "get_items": "mdi:clipboard-arrow-down", - "remove_completed_items": "mdi:clipboard-remove", - "remove_item": "mdi:clipboard-minus", - "update_item": "mdi:clipboard-edit" + "add_item": { + "service": "mdi:clipboard-plus" + }, + "get_items": { + "service": "mdi:clipboard-arrow-down" + }, + "remove_completed_items": { + "service": "mdi:clipboard-remove" + }, + "remove_item": { + "service": "mdi:clipboard-minus" + }, + "update_item": { + "service": "mdi:clipboard-edit" + } } } diff --git a/homeassistant/components/todoist/calendar.py b/homeassistant/components/todoist/calendar.py index 1c6f40005c1..2acd4ea6dc6 100644 --- a/homeassistant/components/todoist/calendar.py +++ b/homeassistant/components/todoist/calendar.py @@ -21,7 +21,7 @@ from homeassistant.components.calendar import ( from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_ID, CONF_NAME, CONF_TOKEN, EVENT_HOMEASSISTANT_STOP from homeassistant.core import Event, HomeAssistant, ServiceCall, callback -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers.aiohttp_client import async_get_clientsession import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -54,6 +54,7 @@ from .const import ( REMINDER_DATE, REMINDER_DATE_LANG, REMINDER_DATE_STRING, + SECTION_NAME, SERVICE_NEW_TASK, START, SUMMARY, @@ -68,6 +69,7 @@ NEW_TASK_SERVICE_SCHEMA = vol.Schema( vol.Required(CONTENT): cv.string, vol.Optional(DESCRIPTION): cv.string, 
vol.Optional(PROJECT_NAME, default="inbox"): vol.All(cv.string, vol.Lower), + vol.Optional(SECTION_NAME): vol.All(cv.string, vol.Lower), vol.Optional(LABELS): cv.ensure_list_csv, vol.Optional(ASSIGNEE): cv.string, vol.Optional(PRIORITY): vol.All(vol.Coerce(int), vol.Range(min=1, max=4)), @@ -201,7 +203,7 @@ async def async_setup_platform( async_register_services(hass, coordinator) -def async_register_services( +def async_register_services( # noqa: C901 hass: HomeAssistant, coordinator: TodoistCoordinator ) -> None: """Register services.""" @@ -211,16 +213,42 @@ def async_register_services( session = async_get_clientsession(hass) - async def handle_new_task(call: ServiceCall) -> None: + async def handle_new_task(call: ServiceCall) -> None: # noqa: C901 """Call when a user creates a new Todoist Task from Home Assistant.""" - project_name = call.data[PROJECT_NAME].lower() + project_name = call.data[PROJECT_NAME] projects = await coordinator.async_get_projects() project_id: str | None = None for project in projects: if project_name == project.name.lower(): project_id = project.id + break if project_id is None: - raise HomeAssistantError(f"Invalid project name '{project_name}'") + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="project_invalid", + translation_placeholders={ + "project": project_name, + }, + ) + + # Optional section within project + section_id: str | None = None + if SECTION_NAME in call.data: + section_name = call.data[SECTION_NAME] + sections = await coordinator.async_get_sections(project_id) + for section in sections: + if section_name == section.name.lower(): + section_id = section.id + break + if section_id is None: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="section_invalid", + translation_placeholders={ + "section": section_name, + "project": project_name, + }, + ) # Create the task content = call.data[CONTENT] @@ -228,6 +256,10 @@ def async_register_services( if description := call.data.get(DESCRIPTION): data["description"] = description + + if section_id is not None: + data["section_id"] = section_id + if task_labels := call.data.get(LABELS): data["labels"] = task_labels diff --git a/homeassistant/components/todoist/const.py b/homeassistant/components/todoist/const.py index 1a66fc9764f..be95d57dd2c 100644 --- a/homeassistant/components/todoist/const.py +++ b/homeassistant/components/todoist/const.py @@ -78,6 +78,8 @@ PROJECT_ID: Final = "project_id" PROJECT_NAME: Final = "project" # Todoist API: Fetch all Projects PROJECTS: Final = "projects" +# Section Name: What Section of the Project do you want to add the Task to? +SECTION_NAME: Final = "section" # Calendar Platform: When does a calendar event start? START: Final = "start" # Calendar Platform: What is the next calendar event about? 
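Usage note for the todoist changes above: the following is a minimal, hypothetical automation action exercising the new optional section field of todoist.new_task (the task content, project, section, and label names are purely illustrative and not part of this diff). Per the new schema, an unknown project or section now surfaces as a ServiceValidationError with a translated message instead of a generic HomeAssistantError, and the project/section matching is case-insensitive.

action:
  - service: todoist.new_task
    data:
      content: "Pick up package"   # required
      project: "Inbox"             # optional, defaults to the inbox
      section: "Deliveries"        # new optional field; must exist in the project
      labels: "Chores"             # comma-separated list
      priority: 2                  # 1-4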
diff --git a/homeassistant/components/todoist/coordinator.py b/homeassistant/components/todoist/coordinator.py index e01b4ecb35a..b55680907ac 100644 --- a/homeassistant/components/todoist/coordinator.py +++ b/homeassistant/components/todoist/coordinator.py @@ -4,7 +4,7 @@ from datetime import timedelta import logging from todoist_api_python.api_async import TodoistAPIAsync -from todoist_api_python.models import Label, Project, Task +from todoist_api_python.models import Label, Project, Section, Task from homeassistant.core import HomeAssistant from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed @@ -41,6 +41,10 @@ class TodoistCoordinator(DataUpdateCoordinator[list[Task]]): self._projects = await self.api.get_projects() return self._projects + async def async_get_sections(self, project_id: str) -> list[Section]: + """Return todoist sections for a given project ID.""" + return await self.api.get_sections(project_id=project_id) + async def async_get_labels(self) -> list[Label]: """Return todoist labels fetched at most once.""" if self._labels is None: diff --git a/homeassistant/components/todoist/icons.json b/homeassistant/components/todoist/icons.json index d3b881d480c..73778f1ca23 100644 --- a/homeassistant/components/todoist/icons.json +++ b/homeassistant/components/todoist/icons.json @@ -1,5 +1,7 @@ { "services": { - "new_task": "mdi:checkbox-marked-circle-plus-outline" + "new_task": { + "service": "mdi:checkbox-marked-circle-plus-outline" + } } } diff --git a/homeassistant/components/todoist/services.yaml b/homeassistant/components/todoist/services.yaml index 1bd6320ebe3..17d877ea786 100644 --- a/homeassistant/components/todoist/services.yaml +++ b/homeassistant/components/todoist/services.yaml @@ -13,6 +13,10 @@ new_task: default: Inbox selector: text: + section: + example: Deliveries + selector: + text: labels: example: Chores,Delivieries selector: diff --git a/homeassistant/components/todoist/strings.json b/homeassistant/components/todoist/strings.json index 0cc74c9c8c6..5b083ac58bf 100644 --- a/homeassistant/components/todoist/strings.json +++ b/homeassistant/components/todoist/strings.json @@ -20,6 +20,14 @@ "default": "[%key:common::config_flow::create_entry::authenticated%]" } }, + "exceptions": { + "project_invalid": { + "message": "Invalid project name \"{project}\"" + }, + "section_invalid": { + "message": "Project \"{project}\" has no section \"{section}\"" + } + }, "services": { "new_task": { "name": "New task", @@ -37,6 +45,10 @@ "name": "Project", "description": "The name of the project this task should belong to." }, + "section": { + "name": "Section", + "description": "The name of a section within the project to add the task to." + }, "labels": { "name": "Labels", "description": "Any labels that you want to apply to this task, separated by a comma." diff --git a/homeassistant/components/toon/config_flow.py b/homeassistant/components/toon/config_flow.py index 40e83c3c9be..af9f7b06850 100644 --- a/homeassistant/components/toon/config_flow.py +++ b/homeassistant/components/toon/config_flow.py @@ -48,7 +48,7 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): return await self.async_step_agreement() async def async_step_import( - self, config: dict[str, Any] | None = None + self, import_data: dict[str, Any] | None ) -> ConfigFlowResult: """Start a configuration flow based on imported data. @@ -57,8 +57,8 @@ class ToonFlowHandler(AbstractOAuth2FlowHandler, domain=DOMAIN): the version 1 schema. 
""" - if config is not None and CONF_MIGRATE in config: - self.context.update({CONF_MIGRATE: config[CONF_MIGRATE]}) + if import_data is not None and CONF_MIGRATE in import_data: + self.context.update({CONF_MIGRATE: import_data[CONF_MIGRATE]}) else: await self._async_handle_discovery_without_unique_id() diff --git a/homeassistant/components/toon/icons.json b/homeassistant/components/toon/icons.json index 650bf0b6d19..217f1240893 100644 --- a/homeassistant/components/toon/icons.json +++ b/homeassistant/components/toon/icons.json @@ -1,5 +1,7 @@ { "services": { - "update": "mdi:update" + "update": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/totalconnect/__init__.py b/homeassistant/components/totalconnect/__init__.py index bb19697b1e7..0d8b915770a 100644 --- a/homeassistant/components/totalconnect/__init__.py +++ b/homeassistant/components/totalconnect/__init__.py @@ -7,15 +7,12 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryAuthFailed -import homeassistant.helpers.config_validation as cv from .const import AUTO_BYPASS, CONF_USERCODES, DOMAIN from .coordinator import TotalConnectDataUpdateCoordinator PLATFORMS = [Platform.ALARM_CONTROL_PANEL, Platform.BINARY_SENSOR, Platform.BUTTON] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up upon config entry in user interface.""" diff --git a/homeassistant/components/totalconnect/alarm_control_panel.py b/homeassistant/components/totalconnect/alarm_control_panel.py index 17a16674dd5..edbbbb06e70 100644 --- a/homeassistant/components/totalconnect/alarm_control_panel.py +++ b/homeassistant/components/totalconnect/alarm_control_panel.py @@ -55,13 +55,13 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_ALARM_ARM_AWAY_INSTANT, - {}, + None, "async_alarm_arm_away_instant", ) platform.async_register_entity_service( SERVICE_ALARM_ARM_HOME_INSTANT, - {}, + None, "async_alarm_arm_home_instant", ) @@ -103,6 +103,7 @@ class TotalConnectAlarm(TotalConnectLocationEntity, AlarmControlPanelEntity): @property def state(self) -> str | None: """Return the state of the device.""" + # State attributes can be removed in 2025.3 attr = { "location_id": self._location.location_id, "partition": self._partition_id, diff --git a/homeassistant/components/totalconnect/config_flow.py b/homeassistant/components/totalconnect/config_flow.py index 19d8f09933e..63973fd44e9 100644 --- a/homeassistant/components/totalconnect/config_flow.py +++ b/homeassistant/components/totalconnect/config_flow.py @@ -28,14 +28,16 @@ class TotalConnectConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" self.username = None self.password = None - self.usercodes = {} + self.usercodes: dict[str, Any] = {} self.client = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initiated by the user.""" errors = {} diff --git a/homeassistant/components/totalconnect/icons.json b/homeassistant/components/totalconnect/icons.json index cb62a79c7bb..a21df03e15d 100644 --- a/homeassistant/components/totalconnect/icons.json +++ b/homeassistant/components/totalconnect/icons.json @@ -10,7 
+10,11 @@ } }, "services": { - "arm_away_instant": "mdi:shield-lock", - "arm_home_instant": "mdi:shield-home" + "arm_away_instant": { + "service": "mdi:shield-lock" + }, + "arm_home_instant": { + "service": "mdi:shield-home" + } } } diff --git a/homeassistant/components/touchline_sl/__init__.py b/homeassistant/components/touchline_sl/__init__.py new file mode 100644 index 00000000000..45a85185673 --- /dev/null +++ b/homeassistant/components/touchline_sl/__init__.py @@ -0,0 +1,63 @@ +"""The Roth Touchline SL integration.""" + +from __future__ import annotations + +import asyncio + +from pytouchlinesl import TouchlineSL + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .const import DOMAIN +from .coordinator import TouchlineSLModuleCoordinator + +PLATFORMS: list[Platform] = [Platform.CLIMATE] + +type TouchlineSLConfigEntry = ConfigEntry[list[TouchlineSLModuleCoordinator]] + + +async def async_setup_entry(hass: HomeAssistant, entry: TouchlineSLConfigEntry) -> bool: + """Set up Roth Touchline SL from a config entry.""" + account = TouchlineSL( + username=entry.data[CONF_USERNAME], password=entry.data[CONF_PASSWORD] + ) + + coordinators: list[TouchlineSLModuleCoordinator] = [ + TouchlineSLModuleCoordinator(hass, module) for module in await account.modules() + ] + + await asyncio.gather( + *[ + coordinator.async_config_entry_first_refresh() + for coordinator in coordinators + ] + ) + + device_registry = dr.async_get(hass) + + # Create a new Device for each coordinator to represent each module + for c in coordinators: + module = c.data.module + device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + identifiers={(DOMAIN, module.id)}, + name=module.name, + manufacturer="Roth", + model=module.type, + sw_version=module.version, + ) + + entry.runtime_data = coordinators + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + + return True + + +async def async_unload_entry( + hass: HomeAssistant, entry: TouchlineSLConfigEntry +) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) diff --git a/homeassistant/components/touchline_sl/climate.py b/homeassistant/components/touchline_sl/climate.py new file mode 100644 index 00000000000..93328823749 --- /dev/null +++ b/homeassistant/components/touchline_sl/climate.py @@ -0,0 +1,126 @@ +"""Roth Touchline SL climate integration implementation for Home Assistant.""" + +from typing import Any + +from pytouchlinesl import Zone + +from homeassistant.components.climate import ( + ClimateEntity, + ClimateEntityFeature, + HVACMode, +) +from homeassistant.const import ATTR_TEMPERATURE, UnitOfTemperature +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.update_coordinator import CoordinatorEntity + +from .
import TouchlineSLConfigEntry +from .const import DOMAIN +from .coordinator import TouchlineSLModuleCoordinator + + +async def async_setup_entry( + hass: HomeAssistant, + entry: TouchlineSLConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Touchline devices.""" + coordinators = entry.runtime_data + async_add_entities( + TouchlineSLZone(coordinator=coordinator, zone_id=zone_id) + for coordinator in coordinators + for zone_id in coordinator.data.zones + ) + + +CONSTANT_TEMPERATURE = "constant_temperature" + + +class TouchlineSLZone(CoordinatorEntity[TouchlineSLModuleCoordinator], ClimateEntity): + """Roth Touchline SL Zone.""" + + _attr_has_entity_name = True + _attr_hvac_mode = HVACMode.HEAT + _attr_hvac_modes = [HVACMode.HEAT] + _attr_name = None + _attr_supported_features = ( + ClimateEntityFeature.TARGET_TEMPERATURE | ClimateEntityFeature.PRESET_MODE + ) + _attr_temperature_unit = UnitOfTemperature.CELSIUS + _attr_translation_key = "zone" + + def __init__(self, coordinator: TouchlineSLModuleCoordinator, zone_id: int) -> None: + """Construct a Touchline SL climate zone.""" + super().__init__(coordinator) + self.zone_id: int = zone_id + + self._attr_unique_id = ( + f"module-{self.coordinator.data.module.id}-zone-{self.zone_id}" + ) + + self._attr_device_info = DeviceInfo( + identifiers={(DOMAIN, str(zone_id))}, + name=self.zone.name, + manufacturer="Roth", + via_device=(DOMAIN, coordinator.data.module.id), + model="zone", + suggested_area=self.zone.name, + ) + + # Call this in __init__ so data is populated right away, since it's + # already available in the coordinator data. + self.set_attr() + + @callback + def _handle_coordinator_update(self) -> None: + """Handle updated data from the coordinator.""" + self.set_attr() + super()._handle_coordinator_update() + + @property + def zone(self) -> Zone: + """Return the device object from the coordinator data.""" + return self.coordinator.data.zones[self.zone_id] + + @property + def available(self) -> bool: + """Return if the device is available.""" + return super().available and self.zone_id in self.coordinator.data.zones + + async def async_set_temperature(self, **kwargs: Any) -> None: + """Set new target temperature.""" + if (temperature := kwargs.get(ATTR_TEMPERATURE)) is None: + return + + await self.zone.set_temperature(temperature) + await self.coordinator.async_request_refresh() + + async def async_set_preset_mode(self, preset_mode: str) -> None: + """Assign the zone to a particular global schedule.""" + if not self.zone: + return + + if preset_mode == CONSTANT_TEMPERATURE and self._attr_target_temperature: + await self.zone.set_temperature(temperature=self._attr_target_temperature) + await self.coordinator.async_request_refresh() + return + + if schedule := self.coordinator.data.schedules[preset_mode]: + await self.zone.set_schedule(schedule_id=schedule.id) + await self.coordinator.async_request_refresh() + + def set_attr(self) -> None: + """Populate attributes with data from the coordinator.""" + schedule_names = self.coordinator.data.schedules.keys() + + self._attr_current_temperature = self.zone.temperature + self._attr_target_temperature = self.zone.target_temperature + self._attr_current_humidity = int(self.zone.humidity) + self._attr_preset_modes = [*schedule_names, CONSTANT_TEMPERATURE] + + if self.zone.mode == "constantTemp": + self._attr_preset_mode = CONSTANT_TEMPERATURE + elif self.zone.mode == "globalSchedule": + schedule = self.zone.schedule + self._attr_preset_mode = schedule.name diff --git 
a/homeassistant/components/touchline_sl/config_flow.py b/homeassistant/components/touchline_sl/config_flow.py new file mode 100644 index 00000000000..91d959b5a0a --- /dev/null +++ b/homeassistant/components/touchline_sl/config_flow.py @@ -0,0 +1,62 @@ +"""Config flow for Roth Touchline SL integration.""" + +from __future__ import annotations + +import logging +from typing import Any + +from pytouchlinesl import TouchlineSL +from pytouchlinesl.client import RothAPIError +import voluptuous as vol + +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + +STEP_USER_DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): str, + vol.Required(CONF_PASSWORD): str, + } +) + + +class TouchlineSLConfigFlow(ConfigFlow, domain=DOMAIN): + """Handle a config flow for Roth Touchline SL.""" + + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle the initial step that gathers username and password.""" + errors: dict[str, str] = {} + + if user_input is not None: + try: + account = TouchlineSL( + username=user_input[CONF_USERNAME], + password=user_input[CONF_PASSWORD], + ) + await account.user_id() + except RothAPIError as e: + if e.status == 401: + errors["base"] = "invalid_auth" + else: + errors["base"] = "cannot_connect" + except Exception: + _LOGGER.exception("Unexpected exception") + errors["base"] = "unknown" + else: + unique_account_id = await account.user_id() + await self.async_set_unique_id(str(unique_account_id)) + self._abort_if_unique_id_configured() + + return self.async_create_entry( + title=user_input[CONF_USERNAME], data=user_input + ) + + return self.async_show_form( + step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors + ) diff --git a/homeassistant/components/touchline_sl/const.py b/homeassistant/components/touchline_sl/const.py new file mode 100644 index 00000000000..e441e3721b3 --- /dev/null +++ b/homeassistant/components/touchline_sl/const.py @@ -0,0 +1,3 @@ +"""Constants for the Roth Touchline SL integration.""" + +DOMAIN = "touchline_sl" diff --git a/homeassistant/components/touchline_sl/coordinator.py b/homeassistant/components/touchline_sl/coordinator.py new file mode 100644 index 00000000000..cd74ba6130f --- /dev/null +++ b/homeassistant/components/touchline_sl/coordinator.py @@ -0,0 +1,59 @@ +"""Define an object to manage fetching Touchline SL data.""" + +from __future__ import annotations + +from dataclasses import dataclass +from datetime import timedelta +import logging + +from pytouchlinesl import Module, Zone +from pytouchlinesl.client import RothAPIError +from pytouchlinesl.client.models import GlobalScheduleModel + +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed +from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed + +_LOGGER = logging.getLogger(__name__) + + +@dataclass +class TouchlineSLModuleData: + """Provide type safe way of accessing module data from the coordinator.""" + + module: Module + zones: dict[int, Zone] + schedules: dict[str, GlobalScheduleModel] + + +class TouchlineSLModuleCoordinator(DataUpdateCoordinator[TouchlineSLModuleData]): + """A coordinator to manage the fetching of Touchline SL data.""" + + def __init__(self, hass: HomeAssistant, module: Module) -> None: + """Initialize coordinator.""" + super().__init__( + hass, + logger=_LOGGER, 
+ name=f"Touchline SL ({module.name})", + update_interval=timedelta(seconds=30), + ) + + self.module = module + + async def _async_update_data(self) -> TouchlineSLModuleData: + """Fetch data from the upstream API and pre-process into the right format.""" + try: + zones = await self.module.zones() + schedules = await self.module.schedules() + except RothAPIError as error: + if error.status == 401: + # Trigger a reauthentication if the data update fails due to + # bad authentication. + raise ConfigEntryAuthFailed from error + raise UpdateFailed(error) from error + + return TouchlineSLModuleData( + module=self.module, + zones={z.id: z for z in zones}, + schedules={s.name: s for s in schedules}, + ) diff --git a/homeassistant/components/touchline_sl/manifest.json b/homeassistant/components/touchline_sl/manifest.json new file mode 100644 index 00000000000..8a50b06d613 --- /dev/null +++ b/homeassistant/components/touchline_sl/manifest.json @@ -0,0 +1,10 @@ +{ + "domain": "touchline_sl", + "name": "Roth Touchline SL", + "codeowners": ["@jnsgruk"], + "config_flow": true, + "documentation": "https://www.home-assistant.io/integrations/touchline_sl", + "integration_type": "hub", + "iot_class": "cloud_polling", + "requirements": ["pytouchlinesl==0.1.5"] +} diff --git a/homeassistant/components/touchline_sl/strings.json b/homeassistant/components/touchline_sl/strings.json new file mode 100644 index 00000000000..e3a0ef5a741 --- /dev/null +++ b/homeassistant/components/touchline_sl/strings.json @@ -0,0 +1,36 @@ +{ + "config": { + "flow_title": "Touchline SL Setup Flow", + "error": { + "cannot_connect": "[%key:common::config_flow::error::cannot_connect%]", + "invalid_auth": "[%key:common::config_flow::error::invalid_auth%]", + "unknown": "[%key:common::config_flow::error::unknown%]" + }, + "step": { + "user": { + "title": "Login to Touchline SL", + "description": "Your credentials for the Roth Touchline SL mobile app/web service", + "data": { + "username": "[%key:common::config_flow::data::username%]", + "password": "[%key:common::config_flow::data::password%]" + } + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" + } + }, + "entity": { + "climate": { + "zone": { + "state_attributes": { + "preset_mode": { + "state": { + "constant_temperature": "Constant temperature" + } + } + } + } + } + } +} diff --git a/homeassistant/components/tplink/config_flow.py b/homeassistant/components/tplink/config_flow.py index a0f0ca6eb76..1c02466aef1 100644 --- a/homeassistant/components/tplink/config_flow.py +++ b/homeassistant/components/tplink/config_flow.py @@ -410,9 +410,18 @@ class TPLinkConfigFlow(ConfigFlow, domain=DOMAIN): self._discovered_device = await Discover.discover_single( host, credentials=credentials ) - except TimeoutError: - # Try connect() to legacy devices if discovery fails - self._discovered_device = await Device.connect(config=DeviceConfig(host)) + except TimeoutError as ex: + # Try connect() to legacy devices if discovery fails. This is a + # fallback mechanism for legacy that can handle connections without + # discovery info but if it fails raise the original error which is + # applicable for newer devices. 
+ try: + self._discovered_device = await Device.connect( + config=DeviceConfig(host) + ) + except Exception: # noqa: BLE001 + # Raise the original error instead of the fallback error + raise ex from ex else: if self._discovered_device.config.uses_http: self._discovered_device.config.http_client = ( diff --git a/homeassistant/components/tplink/entity.py b/homeassistant/components/tplink/entity.py index 4ec0480cf82..beb71d4e5ce 100644 --- a/homeassistant/components/tplink/entity.py +++ b/homeassistant/components/tplink/entity.py @@ -68,6 +68,8 @@ EXCLUDED_FEATURES = { # update "current_firmware_version", "available_firmware_version", + "update_available", + "check_latest_firmware", } diff --git a/homeassistant/components/tplink/icons.json b/homeassistant/components/tplink/icons.json index 3da3b4806d3..96ea8f41bb7 100644 --- a/homeassistant/components/tplink/icons.json +++ b/homeassistant/components/tplink/icons.json @@ -109,7 +109,11 @@ } }, "services": { - "sequence_effect": "mdi:playlist-play", - "random_effect": "mdi:shuffle-variant" + "sequence_effect": { + "service": "mdi:playlist-play" + }, + "random_effect": { + "service": "mdi:shuffle-variant" + } } } diff --git a/homeassistant/components/tplink/manifest.json b/homeassistant/components/tplink/manifest.json index 10b0ef61153..0d9761ec8ce 100644 --- a/homeassistant/components/tplink/manifest.json +++ b/homeassistant/components/tplink/manifest.json @@ -301,5 +301,5 @@ "iot_class": "local_polling", "loggers": ["kasa"], "quality_scale": "platinum", - "requirements": ["python-kasa[speedups]==0.7.1"] + "requirements": ["python-kasa[speedups]==0.7.2"] } diff --git a/homeassistant/components/tplink/sensor.py b/homeassistant/components/tplink/sensor.py index 3da414d74d3..1307079937f 100644 --- a/homeassistant/components/tplink/sensor.py +++ b/homeassistant/components/tplink/sensor.py @@ -154,7 +154,5 @@ class TPLinkSensorEntity(CoordinatedTPLinkFeatureEntity, SensorEntity): self._attr_native_value = value # Map to homeassistant units and fallback to upstream one if none found - if self._feature.unit is not None: - self._attr_native_unit_of_measurement = UNIT_MAPPING.get( - self._feature.unit, self._feature.unit - ) + if (unit := self._feature.unit) is not None: + self._attr_native_unit_of_measurement = UNIT_MAPPING.get(unit, unit) diff --git a/homeassistant/components/traccar_server/config_flow.py b/homeassistant/components/traccar_server/config_flow.py index 45a43c08685..a4d109030ae 100644 --- a/homeassistant/components/traccar_server/config_flow.py +++ b/homeassistant/components/traccar_server/config_flow.py @@ -2,7 +2,6 @@ from __future__ import annotations -from collections.abc import Mapping from typing import Any from pytraccar import ApiClient, ServerModel, TraccarException @@ -161,36 +160,34 @@ class TraccarServerConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import( - self, import_info: Mapping[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import an entry.""" - configured_port = str(import_info[CONF_PORT]) + configured_port = str(import_data[CONF_PORT]) self._async_abort_entries_match( { - CONF_HOST: import_info[CONF_HOST], + CONF_HOST: import_data[CONF_HOST], CONF_PORT: configured_port, } ) - if "all_events" in (imported_events := import_info.get("event", [])): + if "all_events" in (imported_events := import_data.get("event", [])): events = list(EVENTS.values()) else: events = imported_events return self.async_create_entry( - 
title=f"{import_info[CONF_HOST]}:{configured_port}", + title=f"{import_data[CONF_HOST]}:{configured_port}", data={ - CONF_HOST: import_info[CONF_HOST], + CONF_HOST: import_data[CONF_HOST], CONF_PORT: configured_port, - CONF_SSL: import_info.get(CONF_SSL, False), - CONF_VERIFY_SSL: import_info.get(CONF_VERIFY_SSL, True), - CONF_USERNAME: import_info[CONF_USERNAME], - CONF_PASSWORD: import_info[CONF_PASSWORD], + CONF_SSL: import_data.get(CONF_SSL, False), + CONF_VERIFY_SSL: import_data.get(CONF_VERIFY_SSL, True), + CONF_USERNAME: import_data[CONF_USERNAME], + CONF_PASSWORD: import_data[CONF_PASSWORD], }, options={ - CONF_MAX_ACCURACY: import_info[CONF_MAX_ACCURACY], + CONF_MAX_ACCURACY: import_data[CONF_MAX_ACCURACY], CONF_EVENTS: events, - CONF_CUSTOM_ATTRIBUTES: import_info.get("monitored_conditions", []), - CONF_SKIP_ACCURACY_FILTER_FOR: import_info.get( + CONF_CUSTOM_ATTRIBUTES: import_data.get("monitored_conditions", []), + CONF_SKIP_ACCURACY_FILTER_FOR: import_data.get( "skip_accuracy_filter_on", [] ), }, diff --git a/homeassistant/components/tradfri/__init__.py b/homeassistant/components/tradfri/__init__.py index 2e267ffaa14..0060310e6c2 100644 --- a/homeassistant/components/tradfri/__init__.py +++ b/homeassistant/components/tradfri/__init__.py @@ -14,7 +14,6 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, EVENT_HOMEASSISTANT_STOP, Platform from homeassistant.core import Event, HomeAssistant, callback from homeassistant.exceptions import ConfigEntryNotReady -import homeassistant.helpers.config_validation as cv import homeassistant.helpers.device_registry as dr from homeassistant.helpers.dispatcher import ( async_dispatcher_connect, @@ -35,7 +34,6 @@ from .const import ( ) from .coordinator import TradfriDeviceDataUpdateCoordinator -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) PLATFORMS = [ Platform.COVER, Platform.FAN, diff --git a/homeassistant/components/trafikverket_train/__init__.py b/homeassistant/components/trafikverket_train/__init__.py index 4bf1f681807..3e807df9301 100644 --- a/homeassistant/components/trafikverket_train/__init__.py +++ b/homeassistant/components/trafikverket_train/__init__.py @@ -2,21 +2,11 @@ from __future__ import annotations -from pytrafikverket import TrafikverketTrain -from pytrafikverket.exceptions import ( - InvalidAuthentication, - MultipleTrainStationsFound, - NoTrainStationFound, -) - from homeassistant.config_entries import ConfigEntry -from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady from homeassistant.helpers import entity_registry as er -from homeassistant.helpers.aiohttp_client import async_get_clientsession -from .const import CONF_FROM, CONF_TO, PLATFORMS +from .const import PLATFORMS from .coordinator import TVDataUpdateCoordinator TVTrainConfigEntry = ConfigEntry[TVDataUpdateCoordinator] @@ -25,21 +15,7 @@ TVTrainConfigEntry = ConfigEntry[TVDataUpdateCoordinator] async def async_setup_entry(hass: HomeAssistant, entry: TVTrainConfigEntry) -> bool: """Set up Trafikverket Train from a config entry.""" - http_session = async_get_clientsession(hass) - train_api = TrafikverketTrain(http_session, entry.data[CONF_API_KEY]) - - try: - to_station = await train_api.async_get_train_station(entry.data[CONF_TO]) - from_station = await train_api.async_get_train_station(entry.data[CONF_FROM]) - except InvalidAuthentication as error: - raise ConfigEntryAuthFailed from 
error - except (NoTrainStationFound, MultipleTrainStationsFound) as error: - raise ConfigEntryNotReady( - f"Problem when trying station {entry.data[CONF_FROM]} to" - f" {entry.data[CONF_TO]}. Error: {error} " - ) from error - - coordinator = TVDataUpdateCoordinator(hass, to_station, from_station) + coordinator = TVDataUpdateCoordinator(hass) await coordinator.async_config_entry_first_refresh() entry.runtime_data = coordinator diff --git a/homeassistant/components/trafikverket_train/coordinator.py b/homeassistant/components/trafikverket_train/coordinator.py index 66ef3e6a1d2..16a7a649b85 100644 --- a/homeassistant/components/trafikverket_train/coordinator.py +++ b/homeassistant/components/trafikverket_train/coordinator.py @@ -10,7 +10,9 @@ from typing import TYPE_CHECKING from pytrafikverket import TrafikverketTrain from pytrafikverket.exceptions import ( InvalidAuthentication, + MultipleTrainStationsFound, NoTrainAnnouncementFound, + NoTrainStationFound, UnknownError, ) from pytrafikverket.models import StationInfoModel, TrainStopModel @@ -22,7 +24,7 @@ from homeassistant.helpers.aiohttp_client import async_get_clientsession from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed from homeassistant.util import dt as dt_util -from .const import CONF_FILTER_PRODUCT, CONF_TIME, DOMAIN +from .const import CONF_FILTER_PRODUCT, CONF_FROM, CONF_TIME, CONF_TO, DOMAIN from .util import next_departuredate if TYPE_CHECKING: @@ -69,13 +71,10 @@ class TVDataUpdateCoordinator(DataUpdateCoordinator[TrainData]): """A Trafikverket Data Update Coordinator.""" config_entry: TVTrainConfigEntry + from_station: StationInfoModel + to_station: StationInfoModel - def __init__( - self, - hass: HomeAssistant, - to_station: StationInfoModel, - from_station: StationInfoModel, - ) -> None: + def __init__(self, hass: HomeAssistant) -> None: """Initialize the Trafikverket coordinator.""" super().__init__( hass, @@ -86,14 +85,29 @@ class TVDataUpdateCoordinator(DataUpdateCoordinator[TrainData]): self._train_api = TrafikverketTrain( async_get_clientsession(hass), self.config_entry.data[CONF_API_KEY] ) - self.from_station: StationInfoModel = from_station - self.to_station: StationInfoModel = to_station self._time: time | None = dt_util.parse_time(self.config_entry.data[CONF_TIME]) self._weekdays: list[str] = self.config_entry.data[CONF_WEEKDAY] self._filter_product: str | None = self.config_entry.options.get( CONF_FILTER_PRODUCT ) + async def _async_setup(self) -> None: + """Initiate stations.""" + try: + self.to_station = await self._train_api.async_get_train_station( + self.config_entry.data[CONF_TO] + ) + self.from_station = await self._train_api.async_get_train_station( + self.config_entry.data[CONF_FROM] + ) + except InvalidAuthentication as error: + raise ConfigEntryAuthFailed from error + except (NoTrainStationFound, MultipleTrainStationsFound) as error: + raise UpdateFailed( + f"Problem when trying station {self.config_entry.data[CONF_FROM]} to" + f" {self.config_entry.data[CONF_TO]}. 
Error: {error} " + ) from error + async def _async_update_data(self) -> TrainData: """Fetch data from Trafikverket.""" diff --git a/homeassistant/components/transmission/icons.json b/homeassistant/components/transmission/icons.json index 56ae46f933d..4458f510951 100644 --- a/homeassistant/components/transmission/icons.json +++ b/homeassistant/components/transmission/icons.json @@ -1,8 +1,16 @@ { "services": { - "add_torrent": "mdi:download", - "remove_torrent": "mdi:download-off", - "start_torrent": "mdi:play", - "stop_torrent": "mdi:stop" + "add_torrent": { + "service": "mdi:download" + }, + "remove_torrent": { + "service": "mdi:download-off" + }, + "start_torrent": { + "service": "mdi:play" + }, + "stop_torrent": { + "service": "mdi:stop" + } } } diff --git a/homeassistant/components/trend/icons.json b/homeassistant/components/trend/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/trend/icons.json +++ b/homeassistant/components/trend/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/tts/__init__.py b/homeassistant/components/tts/__init__.py index 5286b01f67f..70bb2b4c713 100644 --- a/homeassistant/components/tts/__init__.py +++ b/homeassistant/components/tts/__init__.py @@ -137,15 +137,16 @@ def async_default_engine(hass: HomeAssistant) -> str | None: component: EntityComponent[TextToSpeechEntity] = hass.data[DOMAIN] manager: SpeechManager = hass.data[DATA_TTS_MANAGER] - if "cloud" in manager.providers: - return "cloud" + default_entity_id: str | None = None - entity = next(iter(component.entities), None) + for entity in component.entities: + if entity.platform and entity.platform.platform_name == "cloud": + return entity.entity_id - if entity is not None: - return entity.entity_id + if default_entity_id is None: + default_entity_id = entity.entity_id - return next(iter(manager.providers), None) + return default_entity_id or next(iter(manager.providers), None) @callback @@ -1085,6 +1086,7 @@ def websocket_list_engines( language = msg.get("language") providers = [] provider_info: dict[str, Any] + entity_domains: set[str] = set() for entity in component.entities: provider_info = { @@ -1096,15 +1098,20 @@ def websocket_list_engines( language, entity.supported_languages, country ) providers.append(provider_info) + if entity.platform: + entity_domains.add(entity.platform.platform_name) for engine_id, provider in manager.providers.items(): provider_info = { "engine_id": engine_id, + "name": provider.name, "supported_languages": provider.supported_languages, } if language: provider_info["supported_languages"] = language_util.matches( language, provider.supported_languages, country ) + if engine_id in entity_domains: + provider_info["deprecated"] = True providers.append(provider_info) connection.send_message( @@ -1147,6 +1154,8 @@ def websocket_get_engine( "engine_id": engine_id, "supported_languages": provider.supported_languages, } + if isinstance(provider, Provider): + provider_info["name"] = provider.name connection.send_message( websocket_api.result_message(msg["id"], {"provider": provider_info}) diff --git a/homeassistant/components/tts/icons.json b/homeassistant/components/tts/icons.json index cda5f877b25..8cfae7cc8e9 100644 --- a/homeassistant/components/tts/icons.json +++ b/homeassistant/components/tts/icons.json @@ -5,8 +5,14 @@ } }, "services": { - "clear_cache": "mdi:delete", - "say": "mdi:speaker-message", - "speak": "mdi:speaker-message" + 
"clear_cache": { + "service": "mdi:delete" + }, + "say": { + "service": "mdi:speaker-message" + }, + "speak": { + "service": "mdi:speaker-message" + } } } diff --git a/homeassistant/components/tuya/config_flow.py b/homeassistant/components/tuya/config_flow.py index bdef321de7a..104c3b7c9fa 100644 --- a/homeassistant/components/tuya/config_flow.py +++ b/homeassistant/components/tuya/config_flow.py @@ -146,7 +146,9 @@ class TuyaConfigFlow(ConfigFlow, domain=DOMAIN): data=entry_data, ) - async def async_step_reauth(self, _: Mapping[str, Any]) -> ConfigFlowResult: + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: """Handle initiation of re-authentication with Tuya.""" self.__reauth_entry = self.hass.config_entries.async_get_entry( self.context["entry_id"] diff --git a/homeassistant/components/twinkly/config_flow.py b/homeassistant/components/twinkly/config_flow.py index 98802c8bd33..68c455dc619 100644 --- a/homeassistant/components/twinkly/config_flow.py +++ b/homeassistant/components/twinkly/config_flow.py @@ -28,7 +28,9 @@ class TwinklyConfigFlow(ConfigFlow, domain=DOMAIN): """Initialize the config flow.""" self._discovered_device: tuple[dict[str, Any], str] | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle config steps.""" host = user_input[CONF_HOST] if user_input else None diff --git a/homeassistant/components/ukraine_alarm/config_flow.py b/homeassistant/components/ukraine_alarm/config_flow.py index bafe6d1fe11..faaa9240df3 100644 --- a/homeassistant/components/ukraine_alarm/config_flow.py +++ b/homeassistant/components/ukraine_alarm/config_flow.py @@ -3,12 +3,13 @@ from __future__ import annotations import logging +from typing import Any import aiohttp from uasiren.client import Client import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_NAME, CONF_REGION from homeassistant.helpers.aiohttp_client import async_get_clientsession @@ -22,12 +23,14 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize a new UkraineAlarmConfigFlow.""" self.states = None - self.selected_region = None + self.selected_region: dict[str, Any] | None = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" if len(self._async_current_entries()) == 5: @@ -91,7 +94,11 @@ class UkraineAlarmConfigFlow(ConfigFlow, domain=DOMAIN): ): self.selected_region = _find(source, user_input[CONF_REGION]) - if next_step and self.selected_region["regionChildIds"]: + if ( + next_step + and self.selected_region + and self.selected_region["regionChildIds"] + ): return await getattr(self, f"async_step_{next_step}")() return await self._async_finish_flow() diff --git a/homeassistant/components/unifi/icons.json b/homeassistant/components/unifi/icons.json index 2d5017a3187..b089d8eff9c 100644 --- a/homeassistant/components/unifi/icons.json +++ b/homeassistant/components/unifi/icons.json @@ -1,6 +1,10 @@ { "services": { - "reconnect_client": "mdi:sync", - "remove_clients": "mdi:delete" + "reconnect_client": { + "service": "mdi:sync" + }, + "remove_clients": { + "service": "mdi:delete" + } } } diff --git 
a/homeassistant/components/unifi/sensor.py b/homeassistant/components/unifi/sensor.py index 08bd0ddb869..697df00fe55 100644 --- a/homeassistant/components/unifi/sensor.py +++ b/homeassistant/components/unifi/sensor.py @@ -21,7 +21,11 @@ from aiounifi.interfaces.ports import Ports from aiounifi.interfaces.wlans import Wlans from aiounifi.models.api import ApiItemT from aiounifi.models.client import Client -from aiounifi.models.device import Device, TypedDeviceUptimeStatsWanMonitor +from aiounifi.models.device import ( + Device, + TypedDeviceTemperature, + TypedDeviceUptimeStatsWanMonitor, +) from aiounifi.models.outlet import Outlet from aiounifi.models.port import Port from aiounifi.models.wlan import Wlan @@ -170,6 +174,12 @@ def async_device_outlet_supported_fn(hub: UnifiHub, obj_id: str) -> bool: return hub.api.devices[obj_id].outlet_ac_power_budget is not None +@callback +def async_device_uplink_mac_supported_fn(hub: UnifiHub, obj_id: str) -> bool: + """Determine if a device supports reading uplink MAC address.""" + return "uplink_mac" in hub.api.devices[obj_id].raw.get("uplink", {}) + + def device_system_stats_supported_fn( stat_index: int, hub: UnifiHub, obj_id: str ) -> bool: @@ -280,6 +290,72 @@ def make_wan_latency_sensors() -> tuple[UnifiSensorEntityDescription, ...]: ) + +@callback +def async_device_temperatures_value_fn( + temperature_name: str, hub: UnifiHub, device: Device +) -> float: + """Retrieve the temperature of the device.""" + return_value: float = 0 + if device.temperatures: + temperature = _device_temperature(temperature_name, device.temperatures) + return_value = temperature if temperature is not None else 0 + return return_value + + +@callback +def async_device_temperatures_supported_fn( + temperature_name: str, hub: UnifiHub, obj_id: str +) -> bool: + """Determine if a device has temperatures.""" + if (device := hub.api.devices[obj_id]) and device.temperatures: + return _device_temperature(temperature_name, device.temperatures) is not None + return False + + +@callback +def _device_temperature( + temperature_name: str, temperatures: list[TypedDeviceTemperature] +) -> float | None: + """Return the temperature of the device.""" + for temperature in temperatures: + if temperature_name in temperature["name"]: + return temperature["value"] + return None + + +def make_device_temperatur_sensors() -> tuple[UnifiSensorEntityDescription, ...]: + """Create device temperature sensors.""" + + def make_device_temperature_entity_description( + name: str, + ) -> UnifiSensorEntityDescription: + return UnifiSensorEntityDescription[Devices, Device]( + key=f"Device {name} temperature", + device_class=SensorDeviceClass.TEMPERATURE, + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + native_unit_of_measurement=UnitOfTemperature.CELSIUS, + entity_registry_enabled_default=False, + api_handler_fn=lambda api: api.devices, + available_fn=async_device_available_fn, + device_info_fn=async_device_device_info_fn, + name_fn=lambda device: f"{device.name} {name} Temperature", + object_fn=lambda api, obj_id: api.devices[obj_id], + supported_fn=partial(async_device_temperatures_supported_fn, name), + unique_id_fn=lambda hub, obj_id: f"temperature-{slugify(name)}-{obj_id}", + value_fn=partial(async_device_temperatures_value_fn, name), + ) + + return tuple( + make_device_temperature_entity_description(name) + for name in ( + "CPU", + "Local", + "PHY", + ) + ) + + @dataclass(frozen=True, kw_only=True) class UnifiSensorEntityDescription(
SensorEntityDescription, UnifiEntityDescription[HandlerT, ApiItemT] @@ -501,6 +577,19 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( unique_id_fn=lambda hub, obj_id: f"device_temperature-{obj_id}", value_fn=lambda hub, device: device.general_temperature, ), + UnifiSensorEntityDescription[Devices, Device]( + key="Device Uplink MAC", + entity_category=EntityCategory.DIAGNOSTIC, + api_handler_fn=lambda api: api.devices, + available_fn=async_device_available_fn, + device_info_fn=async_device_device_info_fn, + name_fn=lambda device: "Uplink MAC", + object_fn=lambda api, obj_id: api.devices[obj_id], + unique_id_fn=lambda hub, obj_id: f"device_uplink_mac-{obj_id}", + supported_fn=async_device_uplink_mac_supported_fn, + value_fn=lambda hub, device: device.raw.get("uplink", {}).get("uplink_mac"), + is_connected_fn=lambda hub, obj_id: hub.api.devices[obj_id].state == 1, + ), UnifiSensorEntityDescription[Devices, Device]( key="Device State", device_class=SensorDeviceClass.ENUM, @@ -544,7 +633,7 @@ ENTITY_DESCRIPTIONS: tuple[UnifiSensorEntityDescription, ...] = ( ), ) -ENTITY_DESCRIPTIONS += make_wan_latency_sensors() +ENTITY_DESCRIPTIONS += make_wan_latency_sensors() + make_device_temperatur_sensors() async def async_setup_entry( diff --git a/homeassistant/components/unifiprotect/icons.json b/homeassistant/components/unifiprotect/icons.json index bb713d4ee79..5e80e3095b3 100644 --- a/homeassistant/components/unifiprotect/icons.json +++ b/homeassistant/components/unifiprotect/icons.json @@ -1,8 +1,16 @@ { "services": { - "add_doorbell_text": "mdi:message-plus", - "remove_doorbell_text": "mdi:message-minus", - "set_chime_paired_doorbells": "mdi:bell-cog", - "remove_privacy_zone": "mdi:eye-minus" + "add_doorbell_text": { + "service": "mdi:message-plus" + }, + "remove_doorbell_text": { + "service": "mdi:message-minus" + }, + "set_chime_paired_doorbells": { + "service": "mdi:bell-cog" + }, + "remove_privacy_zone": { + "service": "mdi:eye-minus" + } } } diff --git a/homeassistant/components/unifiprotect/manifest.json b/homeassistant/components/unifiprotect/manifest.json index afc4b9a06e6..4483a5990eb 100644 --- a/homeassistant/components/unifiprotect/manifest.json +++ b/homeassistant/components/unifiprotect/manifest.json @@ -40,7 +40,7 @@ "integration_type": "hub", "iot_class": "local_push", "loggers": ["uiprotect", "unifi_discovery"], - "requirements": ["uiprotect==5.4.0", "unifi-discovery==1.2.0"], + "requirements": ["uiprotect==6.0.2", "unifi-discovery==1.2.0"], "ssdp": [ { "manufacturer": "Ubiquiti Networks", diff --git a/homeassistant/components/unifiprotect/migrate.py b/homeassistant/components/unifiprotect/migrate.py index e469b684518..2c631489217 100644 --- a/homeassistant/components/unifiprotect/migrate.py +++ b/homeassistant/components/unifiprotect/migrate.py @@ -107,20 +107,18 @@ async def async_migrate_data( ) -> None: """Run all valid UniFi Protect data migrations.""" - _LOGGER.debug("Start Migrate: async_deprecate_hdr_package") - async_deprecate_hdr_package(hass, entry) - _LOGGER.debug("Completed Migrate: async_deprecate_hdr_package") + _LOGGER.debug("Start Migrate: async_deprecate_hdr") + async_deprecate_hdr(hass, entry) + _LOGGER.debug("Completed Migrate: async_deprecate_hdr") @callback -def async_deprecate_hdr_package(hass: HomeAssistant, entry: UFPConfigEntry) -> None: - """Check for usages of hdr_mode switch and package sensor and raise repair if it is used. 
+def async_deprecate_hdr(hass: HomeAssistant, entry: UFPConfigEntry) -> None: + """Check for usages of hdr_mode switch and raise repair if it is used. UniFi Protect v3.0.22 changed how HDR works so it is no longer a simple on/off toggle. There is Always On, Always Off and Auto. So it has been migrated to a select. The old switch is now deprecated. - Additionally, the Package sensor is no longer functional due to how events work so a repair to notify users. - Added in 2024.4.0 """ @@ -128,11 +126,5 @@ def async_deprecate_hdr_package(hass: HomeAssistant, entry: UFPConfigEntry) -> N hass, entry, "2024.10.0", - { - "hdr_switch": {"id": "hdr_mode", "platform": Platform.SWITCH}, - "package_sensor": { - "id": "smart_obj_package", - "platform": Platform.BINARY_SENSOR, - }, - }, + {"hdr_switch": {"id": "hdr_mode", "platform": Platform.SWITCH}}, ) diff --git a/homeassistant/components/unifiprotect/strings.json b/homeassistant/components/unifiprotect/strings.json index f785498c005..aaef111a351 100644 --- a/homeassistant/components/unifiprotect/strings.json +++ b/homeassistant/components/unifiprotect/strings.json @@ -124,10 +124,6 @@ "deprecate_hdr_switch": { "title": "HDR Mode Switch Deprecated", "description": "UniFi Protect v3 added a new state for HDR (auto). As a result, the HDR Mode Switch has been replaced with an HDR Mode Select, and it is deprecated.\n\nBelow are the detected automations or scripts that use one or more of the deprecated entities:\n{items}\nThe above list may be incomplete and it does not include any template usages inside of dashboards. Please update any templates, automations or scripts accordingly." - }, - "deprecate_package_sensor": { - "title": "Package Event Sensor Deprecated", - "description": "The package event sensor never tripped because of the way events are reported in UniFi Protect. As a result, the sensor is deprecated and will be removed.\n\nBelow are the detected automations or scripts that use one or more of the deprecated entities:\n{items}\nThe above list may be incomplete and it does not include any template usages inside of dashboards. Please update any templates, automations or scripts accordingly." 
} }, "entity": { diff --git a/homeassistant/components/universal/icons.json b/homeassistant/components/universal/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/universal/icons.json +++ b/homeassistant/components/universal/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/upb/config_flow.py b/homeassistant/components/upb/config_flow.py index fec93a51202..d9f111049fd 100644 --- a/homeassistant/components/upb/config_flow.py +++ b/homeassistant/components/upb/config_flow.py @@ -3,12 +3,13 @@ import asyncio from contextlib import suppress import logging +from typing import Any from urllib.parse import urlparse import upb_lib import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ADDRESS, CONF_FILE_PATH, CONF_HOST, CONF_PROTOCOL from homeassistant.exceptions import HomeAssistantError @@ -78,11 +79,9 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self) -> None: - """Initialize the UPB config flow.""" - self.importing = False - - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: @@ -102,9 +101,6 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): await self.async_set_unique_id(network_id) self._abort_if_unique_id_configured() - if self.importing: - return self.async_create_entry(title=info["title"], data=user_input) - return self.async_create_entry( title=info["title"], data={ @@ -117,11 +113,6 @@ class UPBConfigFlow(ConfigFlow, domain=DOMAIN): step_id="user", data_schema=DATA_SCHEMA, errors=errors ) - async def async_step_import(self, user_input): - """Handle import.""" - self.importing = True - return await self.async_step_user(user_input) - def _url_already_configured(self, url): """See if we already have a UPB PIM matching user input configured.""" existing_hosts = { diff --git a/homeassistant/components/upb/icons.json b/homeassistant/components/upb/icons.json index 187f0f60970..0274233da52 100644 --- a/homeassistant/components/upb/icons.json +++ b/homeassistant/components/upb/icons.json @@ -1,12 +1,28 @@ { "services": { - "light_fade_start": "mdi:transition", - "light_fade_stop": "mdi:transition-masked", - "light_blink": "mdi:eye", - "link_deactivate": "mdi:link-off", - "link_goto": "mdi:link-variant", - "link_fade_start": "mdi:transition", - "link_fade_stop": "mdi:transition-masked", - "link_blink": "mdi:eye" + "light_fade_start": { + "service": "mdi:transition" + }, + "light_fade_stop": { + "service": "mdi:transition-masked" + }, + "light_blink": { + "service": "mdi:eye" + }, + "link_deactivate": { + "service": "mdi:link-off" + }, + "link_goto": { + "service": "mdi:link-variant" + }, + "link_fade_start": { + "service": "mdi:transition" + }, + "link_fade_stop": { + "service": "mdi:transition-masked" + }, + "link_blink": { + "service": "mdi:eye" + } } } diff --git a/homeassistant/components/upb/light.py b/homeassistant/components/upb/light.py index eb20fc949dc..881eda3525f 100644 --- a/homeassistant/components/upb/light.py +++ b/homeassistant/components/upb/light.py @@ -42,7 +42,7 @@ async def async_setup_entry( SERVICE_LIGHT_FADE_START, UPB_BRIGHTNESS_RATE_SCHEMA, "async_light_fade_start" ) platform.async_register_entity_service( 
- SERVICE_LIGHT_FADE_STOP, {}, "async_light_fade_stop" + SERVICE_LIGHT_FADE_STOP, None, "async_light_fade_stop" ) platform.async_register_entity_service( SERVICE_LIGHT_BLINK, UPB_BLINK_RATE_SCHEMA, "async_light_blink" diff --git a/homeassistant/components/upb/scene.py b/homeassistant/components/upb/scene.py index 9cf6788de4f..276b620d5b5 100644 --- a/homeassistant/components/upb/scene.py +++ b/homeassistant/components/upb/scene.py @@ -31,10 +31,10 @@ async def async_setup_entry( platform = entity_platform.async_get_current_platform() platform.async_register_entity_service( - SERVICE_LINK_DEACTIVATE, {}, "async_link_deactivate" + SERVICE_LINK_DEACTIVATE, None, "async_link_deactivate" ) platform.async_register_entity_service( - SERVICE_LINK_FADE_STOP, {}, "async_link_fade_stop" + SERVICE_LINK_FADE_STOP, None, "async_link_fade_stop" ) platform.async_register_entity_service( SERVICE_LINK_GOTO, UPB_BRIGHTNESS_RATE_SCHEMA, "async_link_goto" diff --git a/homeassistant/components/update/__init__.py b/homeassistant/components/update/__init__.py index e7813b354c1..cd52de6550f 100644 --- a/homeassistant/components/update/__init__.py +++ b/homeassistant/components/update/__init__.py @@ -95,12 +95,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_SKIP, - {}, + None, async_skip, ) component.async_register_entity_service( "clear_skipped", - {}, + None, async_clear_skipped, ) diff --git a/homeassistant/components/update/icons.json b/homeassistant/components/update/icons.json index 96920c96253..89af07de67f 100644 --- a/homeassistant/components/update/icons.json +++ b/homeassistant/components/update/icons.json @@ -8,8 +8,14 @@ } }, "services": { - "clear_skipped": "mdi:package", - "install": "mdi:package-down", - "skip": "mdi:package-check" + "clear_skipped": { + "service": "mdi:package" + }, + "install": { + "service": "mdi:package-down" + }, + "skip": { + "service": "mdi:package-check" + } } } diff --git a/homeassistant/components/upnp/__init__.py b/homeassistant/components/upnp/__init__.py index 9b51e548f80..214521ee9c0 100644 --- a/homeassistant/components/upnp/__init__.py +++ b/homeassistant/components/upnp/__init__.py @@ -12,7 +12,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from homeassistant.helpers import config_validation as cv, device_registry as dr +from homeassistant.helpers import device_registry as dr from .const import ( CONFIG_ENTRY_FORCE_POLL, @@ -35,7 +35,6 @@ NOTIFICATION_TITLE = "UPnP/IGD Setup" PLATFORMS = [Platform.BINARY_SENSOR, Platform.SENSOR] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) type UpnpConfigEntry = ConfigEntry[UpnpDataUpdateCoordinator] diff --git a/homeassistant/components/utility_meter/icons.json b/homeassistant/components/utility_meter/icons.json index 3c447b4a810..2539b73d168 100644 --- a/homeassistant/components/utility_meter/icons.json +++ b/homeassistant/components/utility_meter/icons.json @@ -12,7 +12,11 @@ } }, "services": { - "reset": "mdi:numeric-0-box-outline", - "calibrate": "mdi:auto-fix" + "reset": { + "service": "mdi:numeric-0-box-outline" + }, + "calibrate": { + "service": "mdi:auto-fix" + } } } diff --git a/homeassistant/components/uvc/manifest.json b/homeassistant/components/uvc/manifest.json index 57e798c3fa6..c72b865b5ef 100644 --- a/homeassistant/components/uvc/manifest.json +++ 
b/homeassistant/components/uvc/manifest.json @@ -5,5 +5,5 @@ "documentation": "https://www.home-assistant.io/integrations/uvc", "iot_class": "local_polling", "loggers": ["uvcclient"], - "requirements": ["uvcclient==0.11.0"] + "requirements": ["uvcclient==0.12.1"] } diff --git a/homeassistant/components/vacuum/__init__.py b/homeassistant/components/vacuum/__init__.py index 90018e2d8cc..867e25d4b2a 100644 --- a/homeassistant/components/vacuum/__init__.py +++ b/homeassistant/components/vacuum/__init__.py @@ -116,37 +116,37 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: component.async_register_entity_service( SERVICE_START, - {}, + None, "async_start", [VacuumEntityFeature.START], ) component.async_register_entity_service( SERVICE_PAUSE, - {}, + None, "async_pause", [VacuumEntityFeature.PAUSE], ) component.async_register_entity_service( SERVICE_RETURN_TO_BASE, - {}, + None, "async_return_to_base", [VacuumEntityFeature.RETURN_HOME], ) component.async_register_entity_service( SERVICE_CLEAN_SPOT, - {}, + None, "async_clean_spot", [VacuumEntityFeature.CLEAN_SPOT], ) component.async_register_entity_service( SERVICE_LOCATE, - {}, + None, "async_locate", [VacuumEntityFeature.LOCATE], ) component.async_register_entity_service( SERVICE_STOP, - {}, + None, "async_stop", [VacuumEntityFeature.STOP], ) diff --git a/homeassistant/components/vacuum/icons.json b/homeassistant/components/vacuum/icons.json index 25f0cfd03ef..4169729efec 100644 --- a/homeassistant/components/vacuum/icons.json +++ b/homeassistant/components/vacuum/icons.json @@ -5,17 +5,41 @@ } }, "services": { - "clean_spot": "mdi:target-variant", - "locate": "mdi:map-marker", - "pause": "mdi:pause", - "return_to_base": "mdi:home-import-outline", - "send_command": "mdi:send", - "set_fan_speed": "mdi:fan", - "start": "mdi:play", - "start_pause": "mdi:play-pause", - "stop": "mdi:stop", - "toggle": "mdi:play-pause", - "turn_off": "mdi:stop", - "turn_on": "mdi:play" + "clean_spot": { + "service": "mdi:target-variant" + }, + "locate": { + "service": "mdi:map-marker" + }, + "pause": { + "service": "mdi:pause" + }, + "return_to_base": { + "service": "mdi:home-import-outline" + }, + "send_command": { + "service": "mdi:send" + }, + "set_fan_speed": { + "service": "mdi:fan" + }, + "start": { + "service": "mdi:play" + }, + "start_pause": { + "service": "mdi:play-pause" + }, + "stop": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:play-pause" + }, + "turn_off": { + "service": "mdi:stop" + }, + "turn_on": { + "service": "mdi:play" + } } } diff --git a/homeassistant/components/vallox/icons.json b/homeassistant/components/vallox/icons.json index 67b41d216d2..f6beb55f1da 100644 --- a/homeassistant/components/vallox/icons.json +++ b/homeassistant/components/vallox/icons.json @@ -37,8 +37,14 @@ } }, "services": { - "set_profile_fan_speed_home": "mdi:home", - "set_profile_fan_speed_away": "mdi:walk", - "set_profile_fan_speed_boost": "mdi:speedometer" + "set_profile_fan_speed_home": { + "service": "mdi:home" + }, + "set_profile_fan_speed_away": { + "service": "mdi:walk" + }, + "set_profile_fan_speed_boost": { + "service": "mdi:speedometer" + } } } diff --git a/homeassistant/components/valve/__init__.py b/homeassistant/components/valve/__init__.py index 3814275b703..04ce12e8a8f 100644 --- a/homeassistant/components/valve/__init__.py +++ b/homeassistant/components/valve/__init__.py @@ -71,11 +71,14 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) 
component.async_register_entity_service( - SERVICE_OPEN_VALVE, {}, "async_handle_open_valve", [ValveEntityFeature.OPEN] + SERVICE_OPEN_VALVE, None, "async_handle_open_valve", [ValveEntityFeature.OPEN] ) component.async_register_entity_service( - SERVICE_CLOSE_VALVE, {}, "async_handle_close_valve", [ValveEntityFeature.CLOSE] + SERVICE_CLOSE_VALVE, + None, + "async_handle_close_valve", + [ValveEntityFeature.CLOSE], ) component.async_register_entity_service( @@ -90,12 +93,12 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: ) component.async_register_entity_service( - SERVICE_STOP_VALVE, {}, "async_stop_valve", [ValveEntityFeature.STOP] + SERVICE_STOP_VALVE, None, "async_stop_valve", [ValveEntityFeature.STOP] ) component.async_register_entity_service( SERVICE_TOGGLE, - {}, + None, "async_toggle", [ValveEntityFeature.OPEN | ValveEntityFeature.CLOSE], ) diff --git a/homeassistant/components/valve/icons.json b/homeassistant/components/valve/icons.json index 2c887ebf273..c9c6b632dcb 100644 --- a/homeassistant/components/valve/icons.json +++ b/homeassistant/components/valve/icons.json @@ -17,10 +17,20 @@ } }, "services": { - "close_valve": "mdi:valve-closed", - "open_valve": "mdi:valve-open", - "set_valve_position": "mdi:valve", - "stop_valve": "mdi:stop", - "toggle": "mdi:valve-open" + "close_valve": { + "service": "mdi:valve-closed" + }, + "open_valve": { + "service": "mdi:valve-open" + }, + "set_valve_position": { + "service": "mdi:valve" + }, + "stop_valve": { + "service": "mdi:stop" + }, + "toggle": { + "service": "mdi:valve-open" + } } } diff --git a/homeassistant/components/velbus/__init__.py b/homeassistant/components/velbus/__init__.py index d47444e3994..685f8b49500 100644 --- a/homeassistant/components/velbus/__init__.py +++ b/homeassistant/components/velbus/__init__.py @@ -119,7 +119,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: async def set_memo_text(call: ServiceCall) -> None: """Handle Memo Text service call.""" memo_text = call.data[CONF_MEMO_TEXT] - memo_text.hass = hass await ( hass.data[DOMAIN][call.data[CONF_INTERFACE]]["cntrl"] .get_module(call.data[CONF_ADDRESS]) diff --git a/homeassistant/components/velbus/icons.json b/homeassistant/components/velbus/icons.json index a806782d189..a46f5e5fbf1 100644 --- a/homeassistant/components/velbus/icons.json +++ b/homeassistant/components/velbus/icons.json @@ -1,8 +1,16 @@ { "services": { - "sync_clock": "mdi:clock", - "scan": "mdi:magnify", - "clear_cache": "mdi:delete", - "set_memo_text": "mdi:note-text" + "sync_clock": { + "service": "mdi:clock" + }, + "scan": { + "service": "mdi:magnify" + }, + "clear_cache": { + "service": "mdi:delete" + }, + "set_memo_text": { + "service": "mdi:note-text" + } } } diff --git a/homeassistant/components/velux/__init__.py b/homeassistant/components/velux/__init__.py index 1b7cbd1ff93..614ed810429 100644 --- a/homeassistant/components/velux/__init__.py +++ b/homeassistant/components/velux/__init__.py @@ -1,48 +1,14 @@ """Support for VELUX KLF 200 devices.""" from pyvlx import Node, PyVLX, PyVLXException -import voluptuous as vol -from homeassistant.config_entries import SOURCE_IMPORT, ConfigEntry +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST, CONF_PASSWORD, EVENT_HOMEASSISTANT_STOP from homeassistant.core import HomeAssistant, ServiceCall, callback -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.entity import Entity -from homeassistant.helpers.typing import 
ConfigType from .const import DOMAIN, LOGGER, PLATFORMS -CONFIG_SCHEMA = vol.Schema( - vol.All( - cv.deprecated(DOMAIN), - { - DOMAIN: vol.Schema( - { - vol.Required(CONF_HOST): cv.string, - vol.Required(CONF_PASSWORD): cv.string, - } - ) - }, - ), - extra=vol.ALLOW_EXTRA, -) - - -async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: - """Set up the velux component.""" - if DOMAIN not in config: - return True - - hass.async_create_task( - hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=config[DOMAIN], - ) - ) - - return True - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up the velux component.""" diff --git a/homeassistant/components/velux/config_flow.py b/homeassistant/components/velux/config_flow.py index c0d4ec8035b..f4bfa13b4d5 100644 --- a/homeassistant/components/velux/config_flow.py +++ b/homeassistant/components/velux/config_flow.py @@ -1,15 +1,11 @@ """Config flow for Velux integration.""" -from typing import Any - from pyvlx import PyVLX, PyVLXException import voluptuous as vol from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_HOST, CONF_PASSWORD -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN import homeassistant.helpers.config_validation as cv -from homeassistant.helpers.issue_registry import IssueSeverity, async_create_issue from .const import DOMAIN, LOGGER @@ -24,59 +20,6 @@ DATA_SCHEMA = vol.Schema( class VeluxConfigFlow(ConfigFlow, domain=DOMAIN): """Handle a config flow for velux.""" - async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: - """Import a config entry.""" - - def create_repair(error: str | None = None) -> None: - if error: - async_create_issue( - self.hass, - DOMAIN, - f"deprecated_yaml_import_issue_{error}", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key=f"deprecated_yaml_import_issue_{error}", - ) - else: - async_create_issue( - self.hass, - HOMEASSISTANT_DOMAIN, - f"deprecated_yaml_{DOMAIN}", - breaks_in_ha_version="2024.9.0", - is_fixable=False, - issue_domain=DOMAIN, - severity=IssueSeverity.WARNING, - translation_key="deprecated_yaml", - translation_placeholders={ - "domain": DOMAIN, - "integration_title": "Velux", - }, - ) - - for entry in self._async_current_entries(): - if entry.data[CONF_HOST] == config[CONF_HOST]: - create_repair() - return self.async_abort(reason="already_configured") - - pyvlx = PyVLX(host=config[CONF_HOST], password=config[CONF_PASSWORD]) - try: - await pyvlx.connect() - await pyvlx.disconnect() - except (PyVLXException, ConnectionError): - create_repair("cannot_connect") - return self.async_abort(reason="cannot_connect") - except Exception: # noqa: BLE001 - create_repair("unknown") - return self.async_abort(reason="unknown") - - create_repair() - return self.async_create_entry( - title=config[CONF_HOST], - data=config, - ) - async def async_step_user( self, user_input: dict[str, str] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/velux/icons.json b/homeassistant/components/velux/icons.json index a16e7b50093..78cb5b14838 100644 --- a/homeassistant/components/velux/icons.json +++ b/homeassistant/components/velux/icons.json @@ -1,5 +1,7 @@ { "services": { - "reboot_gateway": "mdi:restart" + "reboot_gateway": { + "service": "mdi:restart" + } } } diff --git a/homeassistant/components/velux/strings.json 
b/homeassistant/components/velux/strings.json index 3964c22efe2..5b7b459a3f7 100644 --- a/homeassistant/components/velux/strings.json +++ b/homeassistant/components/velux/strings.json @@ -17,16 +17,6 @@ "already_configured": "[%key:common::config_flow::abort::already_configured_device%]" } }, - "issues": { - "deprecated_yaml_import_issue_cannot_connect": { - "title": "The Velux YAML configuration import cannot connect to server", - "description": "Configuring Velux using YAML is being removed but there was an connection error importing your YAML configuration.\n\nMake sure your home assistant can reach the KLF 200." - }, - "deprecated_yaml_import_issue_unknown": { - "title": "The Velux YAML configuration import failed with unknown error raised by pyvlx", - "description": "Configuring Velux using YAML is being removed but there was an unknown error importing your YAML configuration.\n\nCheck your configuration or have a look at the documentation of the integration." - } - }, "services": { "reboot_gateway": { "name": "Reboot gateway", diff --git a/homeassistant/components/venstar/config_flow.py b/homeassistant/components/venstar/config_flow.py index 289f7936676..929f5718c19 100644 --- a/homeassistant/components/venstar/config_flow.py +++ b/homeassistant/components/venstar/config_flow.py @@ -15,7 +15,6 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers.typing import ConfigType from .const import _LOGGER, DOMAIN, VENSTAR_TIMEOUT @@ -85,7 +84,7 @@ class VenstarConfigFlow(ConfigFlow, domain=DOMAIN): errors=errors, ) - async def async_step_import(self, import_data: ConfigType) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import entry from configuration.yaml.""" self._async_abort_entries_match({CONF_HOST: import_data[CONF_HOST]}) return await self.async_step_user( diff --git a/homeassistant/components/venstar/sensor.py b/homeassistant/components/venstar/sensor.py index ee4ad43ade6..484aa711c1e 100644 --- a/homeassistant/components/venstar/sensor.py +++ b/homeassistant/components/venstar/sensor.py @@ -75,7 +75,7 @@ class VenstarSensorEntityDescription(SensorEntityDescription): value_fn: Callable[[VenstarDataUpdateCoordinator, str], Any] name_fn: Callable[[str], str] | None - uom_fn: Callable[[Any], str | None] + uom_fn: Callable[[VenstarDataUpdateCoordinator], str | None] async def async_setup_entry( @@ -99,11 +99,18 @@ async def async_setup_entry( ) runtimes = coordinator.runtimes[-1] - entities.extend( - VenstarSensor(coordinator, config_entry, RUNTIME_ENTITY, sensor_name) - for sensor_name in runtimes - if sensor_name in RUNTIME_DEVICES - ) + for sensor_name in runtimes: + if sensor_name in RUNTIME_DEVICES: + entities.append( + VenstarSensor( + coordinator, config_entry, RUNTIME_ENTITY, sensor_name + ) + ) + entities.extend( + VenstarSensor(coordinator, config_entry, description, sensor_name) + for description in CONSUMABLE_ENTITIES + if description.key == sensor_name + ) for description in INFO_ENTITIES: try: @@ -224,6 +231,27 @@ RUNTIME_ENTITY = VenstarSensorEntityDescription( name_fn=lambda sensor_name: f"{RUNTIME_ATTRIBUTES[sensor_name]} Runtime", ) +CONSUMABLE_ENTITIES: tuple[VenstarSensorEntityDescription, ...] 
= ( + VenstarSensorEntityDescription( + key="filterHours", + state_class=SensorStateClass.MEASUREMENT, + uom_fn=lambda _: UnitOfTime.HOURS, + value_fn=lambda coordinator, sensor_name: ( + coordinator.runtimes[-1][sensor_name] / 100 + ), + name_fn=None, + translation_key="filter_install_time", + ), + VenstarSensorEntityDescription( + key="filterDays", + state_class=SensorStateClass.MEASUREMENT, + uom_fn=lambda _: UnitOfTime.DAYS, + value_fn=lambda coordinator, sensor_name: coordinator.runtimes[-1][sensor_name], + name_fn=None, + translation_key="filter_usage", + ), +) + INFO_ENTITIES: tuple[VenstarSensorEntityDescription, ...] = ( VenstarSensorEntityDescription( key="schedulepart", diff --git a/homeassistant/components/venstar/strings.json b/homeassistant/components/venstar/strings.json index 952353dcbfe..fdc75162651 100644 --- a/homeassistant/components/venstar/strings.json +++ b/homeassistant/components/venstar/strings.json @@ -25,6 +25,12 @@ }, "entity": { "sensor": { + "filter_install_time": { + "name": "Filter installation time" + }, + "filter_usage": { + "name": "Filter usage" + }, "schedule_part": { "name": "Schedule Part", "state": { diff --git a/homeassistant/components/vera/config_flow.py b/homeassistant/components/vera/config_flow.py index 181849f46a1..08e7640773b 100644 --- a/homeassistant/components/vera/config_flow.py +++ b/homeassistant/components/vera/config_flow.py @@ -127,7 +127,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): ), ) - async def async_step_import(self, config: dict[str, Any]) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle a flow initialized by import.""" # If there are entities with the legacy unique_id, then this imported config @@ -146,7 +146,7 @@ class VeraFlowHandler(ConfigFlow, domain=DOMAIN): return await self.async_step_finish( { - **config, + **import_data, CONF_SOURCE: SOURCE_IMPORT, CONF_LEGACY_UNIQUE_ID: use_legacy_unique_id, } diff --git a/homeassistant/components/verisure/__init__.py b/homeassistant/components/verisure/__init__.py index 9e5f0ca2703..0f8c8d936ef 100644 --- a/homeassistant/components/verisure/__init__.py +++ b/homeassistant/components/verisure/__init__.py @@ -12,7 +12,6 @@ from homeassistant.const import CONF_EMAIL, Platform from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady from homeassistant.helpers import entity_registry as er -import homeassistant.helpers.config_validation as cv from homeassistant.helpers.storage import STORAGE_DIR from .const import CONF_LOCK_DEFAULT_CODE, DOMAIN, LOGGER @@ -27,8 +26,6 @@ PLATFORMS = [ Platform.SWITCH, ] -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Set up Verisure from a config entry.""" diff --git a/homeassistant/components/verisure/camera.py b/homeassistant/components/verisure/camera.py index 72f5ab93c70..50606a49eab 100644 --- a/homeassistant/components/verisure/camera.py +++ b/homeassistant/components/verisure/camera.py @@ -33,7 +33,7 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_CAPTURE_SMARTCAM, - {}, + None, VerisureSmartcam.capture_smartcam.__name__, ) diff --git a/homeassistant/components/verisure/icons.json b/homeassistant/components/verisure/icons.json index 35f6960b1e8..809cf004a3f 100644 --- a/homeassistant/components/verisure/icons.json +++ 
b/homeassistant/components/verisure/icons.json @@ -1,7 +1,13 @@ { "services": { - "capture_smartcam": "mdi:camera", - "enable_autolock": "mdi:lock", - "disable_autolock": "mdi:lock-off" + "capture_smartcam": { + "service": "mdi:camera" + }, + "enable_autolock": { + "service": "mdi:lock" + }, + "disable_autolock": { + "service": "mdi:lock-off" + } } } diff --git a/homeassistant/components/verisure/lock.py b/homeassistant/components/verisure/lock.py index da2bc2ced2b..5c56fc0df2c 100644 --- a/homeassistant/components/verisure/lock.py +++ b/homeassistant/components/verisure/lock.py @@ -41,12 +41,12 @@ async def async_setup_entry( platform = async_get_current_platform() platform.async_register_entity_service( SERVICE_DISABLE_AUTOLOCK, - {}, + None, VerisureDoorlock.disable_autolock.__name__, ) platform.async_register_entity_service( SERVICE_ENABLE_AUTOLOCK, - {}, + None, VerisureDoorlock.enable_autolock.__name__, ) diff --git a/homeassistant/components/vesync/__init__.py b/homeassistant/components/vesync/__init__.py index 7dceb1b3f8f..04547d33dea 100644 --- a/homeassistant/components/vesync/__init__.py +++ b/homeassistant/components/vesync/__init__.py @@ -7,7 +7,6 @@ from pyvesync import VeSync from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform from homeassistant.core import HomeAssistant, ServiceCall -from homeassistant.helpers import config_validation as cv from homeassistant.helpers.dispatcher import async_dispatcher_send from .common import async_process_devices @@ -26,8 +25,6 @@ PLATFORMS = [Platform.FAN, Platform.LIGHT, Platform.SENSOR, Platform.SWITCH] _LOGGER = logging.getLogger(__name__) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) - async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool: """Set up Vesync as config entry.""" diff --git a/homeassistant/components/vesync/config_flow.py b/homeassistant/components/vesync/config_flow.py index 15f9f548e35..6115cb9ee76 100644 --- a/homeassistant/components/vesync/config_flow.py +++ b/homeassistant/components/vesync/config_flow.py @@ -1,40 +1,42 @@ """Config flow utilities.""" -from collections import OrderedDict +from typing import Any from pyvesync import VeSync import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import callback +import homeassistant.helpers.config_validation as cv from .const import DOMAIN +DATA_SCHEMA = vol.Schema( + { + vol.Required(CONF_USERNAME): cv.string, + vol.Required(CONF_PASSWORD): cv.string, + } +) + class VeSyncFlowHandler(ConfigFlow, domain=DOMAIN): """Handle a config flow.""" VERSION = 1 - def __init__(self) -> None: - """Instantiate config flow.""" - self._username = None - self._password = None - self.data_schema = OrderedDict() - self.data_schema[vol.Required(CONF_USERNAME)] = str - self.data_schema[vol.Required(CONF_PASSWORD)] = str - @callback - def _show_form(self, errors=None): + def _show_form(self, errors: dict[str, str] | None = None) -> ConfigFlowResult: """Show form to the user.""" return self.async_show_form( step_id="user", - data_schema=vol.Schema(self.data_schema), + data_schema=DATA_SCHEMA, errors=errors if errors else {}, ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow 
start.""" if self._async_current_entries(): return self.async_abort(reason="single_instance_allowed") @@ -42,15 +44,15 @@ class VeSyncFlowHandler(ConfigFlow, domain=DOMAIN): if not user_input: return self._show_form() - self._username = user_input[CONF_USERNAME] - self._password = user_input[CONF_PASSWORD] + username = user_input[CONF_USERNAME] + password = user_input[CONF_PASSWORD] - manager = VeSync(self._username, self._password) + manager = VeSync(username, password) login = await self.hass.async_add_executor_job(manager.login) if not login: return self._show_form(errors={"base": "invalid_auth"}) return self.async_create_entry( - title=self._username, - data={CONF_USERNAME: self._username, CONF_PASSWORD: self._password}, + title=username, + data={CONF_USERNAME: username, CONF_PASSWORD: password}, ) diff --git a/homeassistant/components/vesync/const.py b/homeassistant/components/vesync/const.py index 54fc21d2659..50dce95e42a 100644 --- a/homeassistant/components/vesync/const.py +++ b/homeassistant/components/vesync/const.py @@ -23,6 +23,7 @@ DEV_TYPE_TO_HA = { "Core300S": "fan", "Core400S": "fan", "Core600S": "fan", + "EverestAir": "fan", "Vital200S": "fan", "Vital100S": "fan", "ESD16": "walldimmer", @@ -60,4 +61,9 @@ SKU_TO_BASE_DEVICE = { "LAP-V102S-AASR": "Vital100S", # Alt ID Model Vital100S "LAP-V102S-WEU": "Vital100S", # Alt ID Model Vital100S "LAP-V102S-WUK": "Vital100S", # Alt ID Model Vital100S + "EverestAir": "EverestAir", + "LAP-EL551S-AUS": "EverestAir", # Alt ID Model EverestAir + "LAP-EL551S-AEUR": "EverestAir", # Alt ID Model EverestAir + "LAP-EL551S-WEU": "EverestAir", # Alt ID Model EverestAir + "LAP-EL551S-WUS": "EverestAir", # Alt ID Model EverestAir } diff --git a/homeassistant/components/vesync/fan.py b/homeassistant/components/vesync/fan.py index 4dce2762eef..6ef9e41eb43 100644 --- a/homeassistant/components/vesync/fan.py +++ b/homeassistant/components/vesync/fan.py @@ -25,6 +25,7 @@ _LOGGER = logging.getLogger(__name__) FAN_MODE_AUTO = "auto" FAN_MODE_SLEEP = "sleep" FAN_MODE_PET = "pet" +FAN_MODE_TURBO = "turbo" PRESET_MODES = { "LV-PUR131S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], @@ -32,6 +33,7 @@ PRESET_MODES = { "Core300S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], "Core400S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], "Core600S": [FAN_MODE_AUTO, FAN_MODE_SLEEP], + "EverestAir": [FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_TURBO], "Vital200S": [FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_PET], "Vital100S": [FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_PET], } @@ -41,6 +43,7 @@ SPEED_RANGE = { # off is not included "Core300S": (1, 3), "Core400S": (1, 4), "Core600S": (1, 4), + "EverestAir": (1, 3), "Vital200S": (1, 4), "Vital100S": (1, 4), } @@ -125,7 +128,7 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): @property def preset_mode(self) -> str | None: """Get the current preset mode.""" - if self.smartfan.mode in (FAN_MODE_AUTO, FAN_MODE_SLEEP): + if self.smartfan.mode in (FAN_MODE_AUTO, FAN_MODE_SLEEP, FAN_MODE_TURBO): return self.smartfan.mode return None @@ -192,6 +195,8 @@ class VeSyncFanHA(VeSyncDevice, FanEntity): self.smartfan.sleep_mode() elif preset_mode == FAN_MODE_PET: self.smartfan.pet_mode() + elif preset_mode == FAN_MODE_TURBO: + self.smartfan.turbo_mode() self.schedule_update_ha_state() diff --git a/homeassistant/components/vesync/icons.json b/homeassistant/components/vesync/icons.json index a4bf4afd410..cfdefb2ed09 100644 --- a/homeassistant/components/vesync/icons.json +++ b/homeassistant/components/vesync/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_devices": "mdi:update" + 
"update_devices": { + "service": "mdi:update" + } } } diff --git a/homeassistant/components/vesync/sensor.py b/homeassistant/components/vesync/sensor.py index 81f42f4c2ee..8939295a2db 100644 --- a/homeassistant/components/vesync/sensor.py +++ b/homeassistant/components/vesync/sensor.py @@ -72,6 +72,7 @@ FILTER_LIFE_SUPPORTED = [ "Core300S", "Core400S", "Core600S", + "EverestAir", "Vital100S", "Vital200S", ] @@ -83,7 +84,14 @@ AIR_QUALITY_SUPPORTED = [ "Vital100S", "Vital200S", ] -PM25_SUPPORTED = ["Core300S", "Core400S", "Core600S", "Vital100S", "Vital200S"] +PM25_SUPPORTED = [ + "Core300S", + "Core400S", + "Core600S", + "EverestAir", + "Vital100S", + "Vital200S", +] SENSORS: tuple[VeSyncSensorEntityDescription, ...] = ( VeSyncSensorEntityDescription( diff --git a/homeassistant/components/vicare/binary_sensor.py b/homeassistant/components/vicare/binary_sensor.py index 2df8a2f06d3..7fe248fa266 100644 --- a/homeassistant/components/vicare/binary_sensor.py +++ b/homeassistant/components/vicare/binary_sensor.py @@ -10,7 +10,7 @@ import logging from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( - HeatingDeviceWithComponent as PyViCareHeatingDeviceWithComponent, + HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, ) from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, @@ -112,61 +112,36 @@ def _build_entities( entities: list[ViCareBinarySensor] = [] for device in device_list: - entities.extend(_build_entities_for_device(device.api, device.config)) + # add device entities entities.extend( - _build_entities_for_component( - get_circuits(device.api), device.config, CIRCUIT_SENSORS + ViCareBinarySensor( + description, + device.config, + device.api, ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, device.api) ) - entities.extend( - _build_entities_for_component( - get_burners(device.api), device.config, BURNER_SENSORS + # add component entities + for component_list, entity_description_list in ( + (get_circuits(device.api), CIRCUIT_SENSORS), + (get_burners(device.api), BURNER_SENSORS), + (get_compressors(device.api), COMPRESSOR_SENSORS), + ): + entities.extend( + ViCareBinarySensor( + description, + device.config, + device.api, + component, + ) + for component in component_list + for description in entity_description_list + if is_supported(description.key, description, component) ) - ) - entities.extend( - _build_entities_for_component( - get_compressors(device.api), device.config, COMPRESSOR_SENSORS - ) - ) return entities -def _build_entities_for_device( - device: PyViCareDevice, - device_config: PyViCareDeviceConfig, -) -> list[ViCareBinarySensor]: - """Create device specific ViCare binary sensor entities.""" - - return [ - ViCareBinarySensor( - device, - device_config, - description, - ) - for description in GLOBAL_SENSORS - if is_supported(description.key, description, device) - ] - - -def _build_entities_for_component( - components: list[PyViCareHeatingDeviceWithComponent], - device_config: PyViCareDeviceConfig, - entity_descriptions: tuple[ViCareBinarySensorEntityDescription, ...], -) -> list[ViCareBinarySensor]: - """Create component specific ViCare binary sensor entities.""" - - return [ - ViCareBinarySensor( - component, - device_config, - description, - ) - for component in components - for description in entity_descriptions - if is_supported(description.key, description, component) - ] - - async def 
async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -190,12 +165,13 @@ class ViCareBinarySensor(ViCareEntity, BinarySensorEntity): def __init__( self, - api: PyViCareDevice, - device_config: PyViCareDeviceConfig, description: ViCareBinarySensorEntityDescription, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(device_config, api, description.key) + super().__init__(description.key, device_config, device, component) self.entity_description = description @property diff --git a/homeassistant/components/vicare/button.py b/homeassistant/components/vicare/button.py index c927055dadd..51a763c1fcc 100644 --- a/homeassistant/components/vicare/button.py +++ b/homeassistant/components/vicare/button.py @@ -54,9 +54,9 @@ def _build_entities( return [ ViCareButton( - device.api, - device.config, description, + device.config, + device.api, ) for device in device_list for description in BUTTON_DESCRIPTIONS @@ -87,12 +87,12 @@ class ViCareButton(ViCareEntity, ButtonEntity): def __init__( self, - api: PyViCareDevice, - device_config: PyViCareDeviceConfig, description: ViCareButtonEntityDescription, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, ) -> None: """Initialize the button.""" - super().__init__(device_config, api, description.key) + super().__init__(description.key, device_config, device) self.entity_description = description def press(self) -> None: diff --git a/homeassistant/components/vicare/climate.py b/homeassistant/components/vicare/climate.py index 1333327609d..4968e565d0b 100644 --- a/homeassistant/components/vicare/climate.py +++ b/homeassistant/components/vicare/climate.py @@ -87,10 +87,9 @@ def _build_entities( """Create ViCare climate entities for a device.""" return [ ViCareClimate( + device.config, device.api, circuit, - device.config, - "heating", ) for device in device_list for circuit in get_circuits(device.api) @@ -136,24 +135,22 @@ class ViCareClimate(ViCareEntity, ClimateEntity): _attr_min_temp = VICARE_TEMP_HEATING_MIN _attr_max_temp = VICARE_TEMP_HEATING_MAX _attr_target_temperature_step = PRECISION_WHOLE + _attr_translation_key = "heating" _current_action: bool | None = None _current_mode: str | None = None + _current_program: str | None = None _enable_turn_on_off_backwards_compatibility = False def __init__( self, - api: PyViCareDevice, - circuit: PyViCareHeatingCircuit, device_config: PyViCareDeviceConfig, - translation_key: str, + device: PyViCareDevice, + circuit: PyViCareHeatingCircuit, ) -> None: """Initialize the climate device.""" - super().__init__(device_config, api, circuit.id) + super().__init__(circuit.id, device_config, device) self._circuit = circuit self._attributes: dict[str, Any] = {} - self._current_program = None - self._attr_translation_key = translation_key - self._attributes["vicare_programs"] = self._circuit.getPrograms() self._attr_preset_modes = [ preset @@ -340,7 +337,7 @@ class ViCareClimate(ViCareEntity, ClimateEntity): ) from err @property - def extra_state_attributes(self): + def extra_state_attributes(self) -> dict[str, Any]: """Show Device Attributes.""" return self._attributes diff --git a/homeassistant/components/vicare/entity.py b/homeassistant/components/vicare/entity.py index 1bb2993cd3a..eef114b4039 100644 --- a/homeassistant/components/vicare/entity.py +++ b/homeassistant/components/vicare/entity.py @@ -2,6 +2,9 @@ from PyViCare.PyViCareDevice import Device as 
PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig +from PyViCare.PyViCareHeatingDevice import ( + HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, +) from homeassistant.helpers.device_registry import DeviceInfo from homeassistant.helpers.entity import Entity @@ -16,21 +19,24 @@ class ViCareEntity(Entity): def __init__( self, + unique_id_suffix: str, device_config: PyViCareDeviceConfig, device: PyViCareDevice, - unique_id_suffix: str, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the entity.""" - self._api = device + self._api: PyViCareDevice | PyViCareHeatingDeviceComponent = ( + component if component else device + ) self._attr_unique_id = f"{device_config.getConfig().serial}-{unique_id_suffix}" # valid for compressors, circuits, burners (HeatingDeviceWithComponent) - if hasattr(device, "id"): - self._attr_unique_id += f"-{device.id}" + if component: + self._attr_unique_id += f"-{component.id}" self._attr_device_info = DeviceInfo( identifiers={(DOMAIN, device_config.getConfig().serial)}, - serial_number=device_config.getConfig().serial, + serial_number=device.getSerial(), name=device_config.getModel(), manufacturer="Viessmann", model=device_config.getModel(), diff --git a/homeassistant/components/vicare/fan.py b/homeassistant/components/vicare/fan.py index 088e54c7354..d7dbd037b56 100644 --- a/homeassistant/components/vicare/fan.py +++ b/homeassistant/components/vicare/fan.py @@ -3,6 +3,7 @@ from __future__ import annotations from contextlib import suppress +import enum import logging from PyViCare.PyViCareDevice import Device as PyViCareDevice @@ -28,10 +29,58 @@ from homeassistant.util.percentage import ( from .const import DEVICE_LIST, DOMAIN from .entity import ViCareEntity -from .types import VentilationMode, VentilationProgram _LOGGER = logging.getLogger(__name__) + +class VentilationProgram(enum.StrEnum): + """ViCare preset ventilation programs. 
+ + As listed in https://github.com/somm15/PyViCare/blob/6c5b023ca6c8bb2d38141dd1746dc1705ec84ce8/PyViCare/PyViCareVentilationDevice.py#L37 + """ + + LEVEL_ONE = "levelOne" + LEVEL_TWO = "levelTwo" + LEVEL_THREE = "levelThree" + LEVEL_FOUR = "levelFour" + + +class VentilationMode(enum.StrEnum): + """ViCare ventilation modes.""" + + PERMANENT = "permanent" # on, speed controlled by program (levelOne-levelFour) + VENTILATION = "ventilation" # activated by schedule + SENSOR_DRIVEN = "sensor_driven" # activated by schedule, override by sensor + SENSOR_OVERRIDE = "sensor_override" # activated by sensor + + @staticmethod + def to_vicare_mode(mode: str | None) -> str | None: + """Return the mapped ViCare ventilation mode for the Home Assistant mode.""" + if mode: + try: + ventilation_mode = VentilationMode(mode) + except ValueError: + # ignore unsupported / unmapped modes + return None + return HA_TO_VICARE_MODE_VENTILATION.get(ventilation_mode) if mode else None + return None + + @staticmethod + def from_vicare_mode(vicare_mode: str | None) -> str | None: + """Return the mapped Home Assistant mode for the ViCare ventilation mode.""" + for mode in VentilationMode: + if HA_TO_VICARE_MODE_VENTILATION.get(VentilationMode(mode)) == vicare_mode: + return mode + return None + + +HA_TO_VICARE_MODE_VENTILATION = { + VentilationMode.PERMANENT: "permanent", + VentilationMode.VENTILATION: "ventilation", + VentilationMode.SENSOR_DRIVEN: "sensorDriven", + VentilationMode.SENSOR_OVERRIDE: "sensorOverride", +} + ORDERED_NAMED_FAN_SPEEDS = [ VentilationProgram.LEVEL_ONE, VentilationProgram.LEVEL_TWO, @@ -80,7 +129,7 @@ class ViCareFan(ViCareEntity, FanEntity): device: PyViCareDevice, ) -> None: """Initialize the fan entity.""" - super().__init__(device_config, device, self._attr_translation_key) + super().__init__(self._attr_translation_key, device_config, device) def update(self) -> None: """Update state of fan.""" diff --git a/homeassistant/components/vicare/icons.json b/homeassistant/components/vicare/icons.json index 2f40d8a8822..9d0f27a863c 100644 --- a/homeassistant/components/vicare/icons.json +++ b/homeassistant/components/vicare/icons.json @@ -88,6 +88,8 @@ } }, "services": { - "set_vicare_mode": "mdi:cog" + "set_vicare_mode": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/vicare/manifest.json b/homeassistant/components/vicare/manifest.json index 97c4b91022d..186e9ef6289 100644 --- a/homeassistant/components/vicare/manifest.json +++ b/homeassistant/components/vicare/manifest.json @@ -11,5 +11,5 @@ "documentation": "https://www.home-assistant.io/integrations/vicare", "iot_class": "cloud_polling", "loggers": ["PyViCare"], - "requirements": ["PyViCare==2.32.0"] + "requirements": ["PyViCare-neo==0.2.1"] } diff --git a/homeassistant/components/vicare/number.py b/homeassistant/components/vicare/number.py index c0564170274..3a0cd8dd2cb 100644 --- a/homeassistant/components/vicare/number.py +++ b/homeassistant/components/vicare/number.py @@ -233,30 +233,30 @@ def _build_entities( ) -> list[ViCareNumber]: """Create ViCare number entities for a device.""" - entities: list[ViCareNumber] = [ - ViCareNumber( - device.api, - device.config, - description, - ) - for device in device_list - for description in DEVICE_ENTITY_DESCRIPTIONS - if is_supported(description.key, description, device.api) - ] - - entities.extend( - [ + entities: list[ViCareNumber] = [] + for device in device_list: + # add device entities + entities.extend( ViCareNumber( - circuit, - device.config, description, + 
device.config, + device.api, + ) + for description in DEVICE_ENTITY_DESCRIPTIONS + if is_supported(description.key, description, device.api) + ) + # add component entities + entities.extend( + ViCareNumber( + description, + device.config, + device.api, + circuit, ) - for device in device_list for circuit in get_circuits(device.api) for description in CIRCUIT_ENTITY_DESCRIPTIONS if is_supported(description.key, description, circuit) - ] - ) + ) return entities @@ -283,12 +283,13 @@ class ViCareNumber(ViCareEntity, NumberEntity): def __init__( self, - api: PyViCareHeatingDeviceComponent, - device_config: PyViCareDeviceConfig, description: ViCareNumberEntityDescription, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the number.""" - super().__init__(device_config, api, description.key) + super().__init__(description.key, device_config, device, component) self.entity_description = description @property diff --git a/homeassistant/components/vicare/sensor.py b/homeassistant/components/vicare/sensor.py index 0271ffc9798..3a16d77249e 100644 --- a/homeassistant/components/vicare/sensor.py +++ b/homeassistant/components/vicare/sensor.py @@ -10,7 +10,7 @@ import logging from PyViCare.PyViCareDevice import Device as PyViCareDevice from PyViCare.PyViCareDeviceConfig import PyViCareDeviceConfig from PyViCare.PyViCareHeatingDevice import ( - HeatingDeviceWithComponent as PyViCareHeatingDeviceWithComponent, + HeatingDeviceWithComponent as PyViCareHeatingDeviceComponent, ) from PyViCare.PyViCareUtils import ( PyViCareInvalidDataError, @@ -747,7 +747,6 @@ GLOBAL_SENSORS: tuple[ViCareSensorEntityDescription, ...] = ( ), ) - CIRCUIT_SENSORS: tuple[ViCareSensorEntityDescription, ...] 
= ( ViCareSensorEntityDescription( key="supply_temperature", @@ -865,61 +864,36 @@ def _build_entities( entities: list[ViCareSensor] = [] for device in device_list: - entities.extend(_build_entities_for_device(device.api, device.config)) + # add device entities entities.extend( - _build_entities_for_component( - get_circuits(device.api), device.config, CIRCUIT_SENSORS + ViCareSensor( + description, + device.config, + device.api, ) + for description in GLOBAL_SENSORS + if is_supported(description.key, description, device.api) ) - entities.extend( - _build_entities_for_component( - get_burners(device.api), device.config, BURNER_SENSORS + # add component entities + for component_list, entity_description_list in ( + (get_circuits(device.api), CIRCUIT_SENSORS), + (get_burners(device.api), BURNER_SENSORS), + (get_compressors(device.api), COMPRESSOR_SENSORS), + ): + entities.extend( + ViCareSensor( + description, + device.config, + device.api, + component, + ) + for component in component_list + for description in entity_description_list + if is_supported(description.key, description, component) ) - ) - entities.extend( - _build_entities_for_component( - get_compressors(device.api), device.config, COMPRESSOR_SENSORS - ) - ) return entities -def _build_entities_for_device( - device: PyViCareDevice, - device_config: PyViCareDeviceConfig, -) -> list[ViCareSensor]: - """Create device specific ViCare sensor entities.""" - - return [ - ViCareSensor( - device, - device_config, - description, - ) - for description in GLOBAL_SENSORS - if is_supported(description.key, description, device) - ] - - -def _build_entities_for_component( - components: list[PyViCareHeatingDeviceWithComponent], - device_config: PyViCareDeviceConfig, - entity_descriptions: tuple[ViCareSensorEntityDescription, ...], -) -> list[ViCareSensor]: - """Create component specific ViCare sensor entities.""" - - return [ - ViCareSensor( - component, - device_config, - description, - ) - for component in components - for description in entity_descriptions - if is_supported(description.key, description, component) - ] - - async def async_setup_entry( hass: HomeAssistant, config_entry: ConfigEntry, @@ -943,12 +917,13 @@ class ViCareSensor(ViCareEntity, SensorEntity): def __init__( self, - api, - device_config: PyViCareDeviceConfig, description: ViCareSensorEntityDescription, + device_config: PyViCareDeviceConfig, + device: PyViCareDevice, + component: PyViCareHeatingDeviceComponent | None = None, ) -> None: """Initialize the sensor.""" - super().__init__(device_config, api, description.key) + super().__init__(description.key, device_config, device, component) self.entity_description = description # run update to have device_class set depending on unit_of_measurement self.update() diff --git a/homeassistant/components/vicare/types.py b/homeassistant/components/vicare/types.py index 596605fccdd..7e1ec7f8bee 100644 --- a/homeassistant/components/vicare/types.py +++ b/homeassistant/components/vicare/types.py @@ -64,55 +64,6 @@ VICARE_TO_HA_PRESET_HEATING = { } -class VentilationMode(enum.StrEnum): - """ViCare ventilation modes.""" - - PERMANENT = "permanent" # on, speed controlled by program (levelOne-levelFour) - VENTILATION = "ventilation" # activated by schedule - SENSOR_DRIVEN = "sensor_driven" # activated by schedule, override by sensor - SENSOR_OVERRIDE = "sensor_override" # activated by sensor - - @staticmethod - def to_vicare_mode(mode: str | None) -> str | None: - """Return the mapped ViCare ventilation mode for the Home Assistant mode.""" 
- if mode: - try: - ventilation_mode = VentilationMode(mode) - except ValueError: - # ignore unsupported / unmapped modes - return None - return HA_TO_VICARE_MODE_VENTILATION.get(ventilation_mode) if mode else None - return None - - @staticmethod - def from_vicare_mode(vicare_mode: str | None) -> str | None: - """Return the mapped Home Assistant mode for the ViCare ventilation mode.""" - for mode in VentilationMode: - if HA_TO_VICARE_MODE_VENTILATION.get(VentilationMode(mode)) == vicare_mode: - return mode - return None - - -HA_TO_VICARE_MODE_VENTILATION = { - VentilationMode.PERMANENT: "permanent", - VentilationMode.VENTILATION: "ventilation", - VentilationMode.SENSOR_DRIVEN: "sensorDriven", - VentilationMode.SENSOR_OVERRIDE: "sensorOverride", -} - - -class VentilationProgram(enum.StrEnum): - """ViCare preset ventilation programs. - - As listed in https://github.com/somm15/PyViCare/blob/6c5b023ca6c8bb2d38141dd1746dc1705ec84ce8/PyViCare/PyViCareVentilationDevice.py#L37 - """ - - LEVEL_ONE = "levelOne" - LEVEL_TWO = "levelTwo" - LEVEL_THREE = "levelThree" - LEVEL_FOUR = "levelFour" - - @dataclass(frozen=True) class ViCareDevice: """Dataclass holding the device api and config.""" diff --git a/homeassistant/components/vicare/water_heater.py b/homeassistant/components/vicare/water_heater.py index 223217f4e13..621d2f2a09b 100644 --- a/homeassistant/components/vicare/water_heater.py +++ b/homeassistant/components/vicare/water_heater.py @@ -69,10 +69,9 @@ def _build_entities( return [ ViCareWater( + device.config, device.api, circuit, - device.config, - "domestic_hot_water", ) for device in device_list for circuit in get_circuits(device.api) @@ -104,20 +103,19 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity): _attr_min_temp = VICARE_TEMP_WATER_MIN _attr_max_temp = VICARE_TEMP_WATER_MAX _attr_operation_list = list(HA_TO_VICARE_HVAC_DHW) + _attr_translation_key = "domestic_hot_water" + _current_mode: str | None = None def __init__( self, - api: PyViCareDevice, - circuit: PyViCareHeatingCircuit, device_config: PyViCareDeviceConfig, - translation_key: str, + device: PyViCareDevice, + circuit: PyViCareHeatingCircuit, ) -> None: """Initialize the DHW water_heater device.""" - super().__init__(device_config, api, circuit.id) + super().__init__(circuit.id, device_config, device) self._circuit = circuit self._attributes: dict[str, Any] = {} - self._current_mode = None - self._attr_translation_key = translation_key def update(self) -> None: """Let HA know there has been an update from the ViCare API.""" @@ -151,6 +149,8 @@ class ViCareWater(ViCareEntity, WaterHeaterEntity): self._attr_target_temperature = temp @property - def current_operation(self): + def current_operation(self) -> str | None: """Return current operation ie. 
heat, cool, idle.""" - return VICARE_TO_HA_HVAC_DHW.get(self._current_mode) + if self._current_mode is None: + return None + return VICARE_TO_HA_HVAC_DHW.get(self._current_mode, None) diff --git a/homeassistant/components/vilfo/config_flow.py b/homeassistant/components/vilfo/config_flow.py index b21c63bfb97..a6cff506f79 100644 --- a/homeassistant/components/vilfo/config_flow.py +++ b/homeassistant/components/vilfo/config_flow.py @@ -1,6 +1,7 @@ """Config flow for Vilfo Router integration.""" import logging +from typing import Any from vilfo import Client as VilfoClient from vilfo.exceptions import ( @@ -9,7 +10,7 @@ from vilfo.exceptions import ( ) import voluptuous as vol -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST, CONF_ID, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError @@ -99,7 +100,9 @@ class DomainConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/vizio/config_flow.py b/homeassistant/components/vizio/config_flow.py index d8b99595f54..c8f1aaa21cb 100644 --- a/homeassistant/components/vizio/config_flow.py +++ b/homeassistant/components/vizio/config_flow.py @@ -285,9 +285,7 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_import( - self, import_config: dict[str, Any] - ) -> ConfigFlowResult: + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Import a config entry from configuration.yaml.""" # Check if new config entry matches any existing config entries for entry in self._async_current_entries(): @@ -296,28 +294,28 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): continue if await self.hass.async_add_executor_job( - _host_is_same, entry.data[CONF_HOST], import_config[CONF_HOST] + _host_is_same, entry.data[CONF_HOST], import_data[CONF_HOST] ): updated_options: dict[str, Any] = {} updated_data: dict[str, Any] = {} remove_apps = False - if entry.data[CONF_HOST] != import_config[CONF_HOST]: - updated_data[CONF_HOST] = import_config[CONF_HOST] + if entry.data[CONF_HOST] != import_data[CONF_HOST]: + updated_data[CONF_HOST] = import_data[CONF_HOST] - if entry.data[CONF_NAME] != import_config[CONF_NAME]: - updated_data[CONF_NAME] = import_config[CONF_NAME] + if entry.data[CONF_NAME] != import_data[CONF_NAME]: + updated_data[CONF_NAME] = import_data[CONF_NAME] # Update entry.data[CONF_APPS] if import_config[CONF_APPS] differs, and # pop entry.data[CONF_APPS] if import_config[CONF_APPS] is not specified - if entry.data.get(CONF_APPS) != import_config.get(CONF_APPS): - if not import_config.get(CONF_APPS): + if entry.data.get(CONF_APPS) != import_data.get(CONF_APPS): + if not import_data.get(CONF_APPS): remove_apps = True else: - updated_options[CONF_APPS] = import_config[CONF_APPS] + updated_options[CONF_APPS] = import_data[CONF_APPS] - if entry.data.get(CONF_VOLUME_STEP) != import_config[CONF_VOLUME_STEP]: - updated_options[CONF_VOLUME_STEP] = import_config[CONF_VOLUME_STEP] + if entry.data.get(CONF_VOLUME_STEP) != import_data[CONF_VOLUME_STEP]: + updated_options[CONF_VOLUME_STEP] = 
import_data[CONF_VOLUME_STEP] if updated_options or updated_data or remove_apps: new_data = entry.data.copy() @@ -345,9 +343,9 @@ class VizioConfigFlow(ConfigFlow, domain=DOMAIN): self._must_show_form = True # Store config key/value pairs that are not configurable in user step so they # don't get lost on user step - if import_config.get(CONF_APPS): - self._apps = copy.deepcopy(import_config[CONF_APPS]) - return await self.async_step_user(user_input=import_config) + if import_data.get(CONF_APPS): + self._apps = copy.deepcopy(import_data[CONF_APPS]) + return await self.async_step_user(user_input=import_data) async def async_step_zeroconf( self, discovery_info: zeroconf.ZeroconfServiceInfo diff --git a/homeassistant/components/vizio/icons.json b/homeassistant/components/vizio/icons.json index ccdaf816bb0..be6f727de6f 100644 --- a/homeassistant/components/vizio/icons.json +++ b/homeassistant/components/vizio/icons.json @@ -1,5 +1,7 @@ { "services": { - "update_setting": "mdi:cog" + "update_setting": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/voip/voip.py b/homeassistant/components/voip/voip.py index 161e938a3b6..be1e58b6eec 100644 --- a/homeassistant/components/voip/voip.py +++ b/homeassistant/components/voip/voip.py @@ -33,7 +33,7 @@ from homeassistant.components.assist_pipeline import ( ) from homeassistant.components.assist_pipeline.audio_enhancer import ( AudioEnhancer, - MicroVadEnhancer, + MicroVadSpeexEnhancer, ) from homeassistant.components.assist_pipeline.vad import ( AudioBuffer, @@ -235,7 +235,7 @@ class PipelineRtpDatagramProtocol(RtpDatagramProtocol): try: # Wait for speech before starting pipeline segmenter = VoiceCommandSegmenter(silence_seconds=self.silence_seconds) - audio_enhancer = MicroVadEnhancer(0, 0, True) + audio_enhancer = MicroVadSpeexEnhancer(0, 0, True) chunk_buffer: deque[bytes] = deque( maxlen=self.buffered_chunks_before_speech, ) diff --git a/homeassistant/components/volumio/config_flow.py b/homeassistant/components/volumio/config_flow.py index 8edda1d20b0..4c7a48f36c7 100644 --- a/homeassistant/components/volumio/config_flow.py +++ b/homeassistant/components/volumio/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import Any from pyvolumio import CannotConnectError, Volumio import voluptuous as vol @@ -68,7 +69,9 @@ class VolumioConfigFlow(ConfigFlow, domain=DOMAIN): } ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/volvooncall/config_flow.py b/homeassistant/components/volvooncall/config_flow.py index 80358a28ced..b3a1745351b 100644 --- a/homeassistant/components/volvooncall/config_flow.py +++ b/homeassistant/components/volvooncall/config_flow.py @@ -107,7 +107,7 @@ class VolvoOnCallConfigFlow(ConfigFlow, domain=DOMAIN): ) async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Perform reauth upon an API authentication error.""" self._reauth_entry = self.hass.config_entries.async_get_entry( diff --git a/homeassistant/components/vulcan/config_flow.py b/homeassistant/components/vulcan/config_flow.py index 560d777b517..5938e4ce690 100644 --- a/homeassistant/components/vulcan/config_flow.py +++ b/homeassistant/components/vulcan/config_flow.py @@ -44,7 +44,9 @@ class VulcanFlowHandler(ConfigFlow, 
domain=DOMAIN): self.keystore = None self.students = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle config flow.""" if self._async_current_entries(): return await self.async_step_add_next_config_entry() diff --git a/homeassistant/components/wake_on_lan/icons.json b/homeassistant/components/wake_on_lan/icons.json index 6426c478157..f083b0342f4 100644 --- a/homeassistant/components/wake_on_lan/icons.json +++ b/homeassistant/components/wake_on_lan/icons.json @@ -1,5 +1,7 @@ { "services": { - "send_magic_packet": "mdi:cube-send" + "send_magic_packet": { + "service": "mdi:cube-send" + } } } diff --git a/homeassistant/components/water_heater/__init__.py b/homeassistant/components/water_heater/__init__.py index 731a513fb66..e6e424329fb 100644 --- a/homeassistant/components/water_heater/__init__.py +++ b/homeassistant/components/water_heater/__init__.py @@ -13,7 +13,6 @@ import voluptuous as vol from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( - ATTR_ENTITY_ID, ATTR_TEMPERATURE, PRECISION_TENTHS, PRECISION_WHOLE, @@ -35,7 +34,7 @@ from homeassistant.helpers.deprecation import ( from homeassistant.helpers.entity import Entity, EntityDescription from homeassistant.helpers.entity_component import EntityComponent from homeassistant.helpers.temperature import display_temp as show_temp -from homeassistant.helpers.typing import ConfigType +from homeassistant.helpers.typing import ConfigType, VolDictType from homeassistant.util.unit_conversion import TemperatureConverter from .const import DOMAIN @@ -94,29 +93,16 @@ CONVERTIBLE_ATTRIBUTE = [ATTR_TEMPERATURE] _LOGGER = logging.getLogger(__name__) -ON_OFF_SERVICE_SCHEMA = vol.Schema({vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids}) - -SET_AWAY_MODE_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_AWAY_MODE): cv.boolean, - } -) -SET_TEMPERATURE_SCHEMA = vol.Schema( - vol.All( - { - vol.Required(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float), - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Optional(ATTR_OPERATION_MODE): cv.string, - } - ) -) -SET_OPERATION_MODE_SCHEMA = vol.Schema( - { - vol.Optional(ATTR_ENTITY_ID): cv.comp_entity_ids, - vol.Required(ATTR_OPERATION_MODE): cv.string, - } -) +SET_AWAY_MODE_SCHEMA: VolDictType = { + vol.Required(ATTR_AWAY_MODE): cv.boolean, +} +SET_TEMPERATURE_SCHEMA: VolDictType = { + vol.Required(ATTR_TEMPERATURE, "temperature"): vol.Coerce(float), + vol.Optional(ATTR_OPERATION_MODE): cv.string, +} +SET_OPERATION_MODE_SCHEMA: VolDictType = { + vol.Required(ATTR_OPERATION_MODE): cv.string, +} # mypy: disallow-any-generics @@ -129,10 +115,10 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: await component.async_setup(config) component.async_register_entity_service( - SERVICE_TURN_ON, {}, "async_turn_on", [WaterHeaterEntityFeature.ON_OFF] + SERVICE_TURN_ON, None, "async_turn_on", [WaterHeaterEntityFeature.ON_OFF] ) component.async_register_entity_service( - SERVICE_TURN_OFF, {}, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF] + SERVICE_TURN_OFF, None, "async_turn_off", [WaterHeaterEntityFeature.ON_OFF] ) component.async_register_entity_service( SERVICE_SET_AWAY_MODE, SET_AWAY_MODE_SCHEMA, async_service_away_mode @@ -145,12 +131,6 @@ async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: SET_OPERATION_MODE_SCHEMA, "async_handle_set_operation_mode", ) - 
component.async_register_entity_service( - SERVICE_TURN_OFF, ON_OFF_SERVICE_SCHEMA, "async_turn_off" - ) - component.async_register_entity_service( - SERVICE_TURN_ON, ON_OFF_SERVICE_SCHEMA, "async_turn_on" - ) return True diff --git a/homeassistant/components/water_heater/icons.json b/homeassistant/components/water_heater/icons.json index af6996374c5..bc80128c6a3 100644 --- a/homeassistant/components/water_heater/icons.json +++ b/homeassistant/components/water_heater/icons.json @@ -22,10 +22,20 @@ } }, "services": { - "set_away_mode": "mdi:account-arrow-right", - "set_operation_mode": "mdi:water-boiler", - "set_temperature": "mdi:thermometer", - "turn_off": "mdi:water-boiler-off", - "turn_on": "mdi:water-boiler" + "set_away_mode": { + "service": "mdi:account-arrow-right" + }, + "set_operation_mode": { + "service": "mdi:water-boiler" + }, + "set_temperature": { + "service": "mdi:thermometer" + }, + "turn_off": { + "service": "mdi:water-boiler-off" + }, + "turn_on": { + "service": "mdi:water-boiler" + } } } diff --git a/homeassistant/components/waze_travel_time/icons.json b/homeassistant/components/waze_travel_time/icons.json index fa95e8fdd8a..98e6f26774c 100644 --- a/homeassistant/components/waze_travel_time/icons.json +++ b/homeassistant/components/waze_travel_time/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "get_travel_times": "mdi:timelapse" + "get_travel_times": { + "service": "mdi:timelapse" + } } } diff --git a/homeassistant/components/weather/icons.json b/homeassistant/components/weather/icons.json index cc53861e700..04b3c1d3df8 100644 --- a/homeassistant/components/weather/icons.json +++ b/homeassistant/components/weather/icons.json @@ -21,7 +21,11 @@ } }, "services": { - "get_forecast": "mdi:weather-cloudy-clock", - "get_forecasts": "mdi:weather-cloudy-clock" + "get_forecast": { + "service": "mdi:weather-cloudy-clock" + }, + "get_forecasts": { + "service": "mdi:weather-cloudy-clock" + } } } diff --git a/homeassistant/components/weatherflow_cloud/config_flow.py b/homeassistant/components/weatherflow_cloud/config_flow.py index e8972c320ed..cbb83b6f25b 100644 --- a/homeassistant/components/weatherflow_cloud/config_flow.py +++ b/homeassistant/components/weatherflow_cloud/config_flow.py @@ -33,9 +33,15 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 async def async_step_reauth( - self, user_input: Mapping[str, Any] + self, entry_data: Mapping[str, Any] ) -> ConfigFlowResult: """Handle a flow for reauth.""" + return await self.async_step_reauth_confirm() + + async def async_step_reauth_confirm( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: + """Handle a flow initiated by reauthentication.""" errors = {} if user_input is not None: @@ -54,7 +60,7 @@ class WeatherFlowCloudConfigFlow(ConfigFlow, domain=DOMAIN): ) return self.async_show_form( - step_id="reauth", + step_id="reauth_confirm", data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}), errors=errors, ) diff --git a/homeassistant/components/weatherflow_cloud/manifest.json b/homeassistant/components/weatherflow_cloud/manifest.json index 354b9642c06..aaa5bce2e16 100644 --- a/homeassistant/components/weatherflow_cloud/manifest.json +++ b/homeassistant/components/weatherflow_cloud/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/weatherflow_cloud", "iot_class": "cloud_polling", "loggers": ["weatherflow4py"], - "requirements": ["weatherflow4py==0.2.21"] + "requirements": ["weatherflow4py==0.2.23"] } diff --git 
a/homeassistant/components/weatherflow_cloud/sensor.py b/homeassistant/components/weatherflow_cloud/sensor.py index 9314c77a65c..1c7fa5fb377 100644 --- a/homeassistant/components/weatherflow_cloud/sensor.py +++ b/homeassistant/components/weatherflow_cloud/sensor.py @@ -158,8 +158,12 @@ WF_SENSORS: tuple[WeatherFlowCloudSensorEntityDescription, ...] = ( key="lightning_strike_last_epoch", translation_key="lightning_strike_last_epoch", device_class=SensorDeviceClass.TIMESTAMP, - value_fn=lambda data: datetime.fromtimestamp( - data.lightning_strike_last_epoch, tz=UTC + value_fn=( + lambda data: datetime.fromtimestamp( + data.lightning_strike_last_epoch, tz=UTC + ) + if data.lightning_strike_last_epoch is not None + else None ), ), ) diff --git a/homeassistant/components/weatherflow_cloud/strings.json b/homeassistant/components/weatherflow_cloud/strings.json index df561c8b753..f707cbb0353 100644 --- a/homeassistant/components/weatherflow_cloud/strings.json +++ b/homeassistant/components/weatherflow_cloud/strings.json @@ -7,7 +7,7 @@ "api_token": "Personal api token" } }, - "reauth": { + "reauth_confirm": { "description": "Reauthenticate with WeatherFlow", "data": { "api_token": "[%key:component::weatherflow_cloud::config::step::user::data::api_token%]" diff --git a/homeassistant/components/webostv/__init__.py b/homeassistant/components/webostv/__init__.py index 36950b0e02a..499d0a85518 100644 --- a/homeassistant/components/webostv/__init__.py +++ b/homeassistant/components/webostv/__init__.py @@ -40,7 +40,7 @@ from .const import ( WEBOSTV_EXCEPTIONS, ) -CONFIG_SCHEMA = cv.removed(DOMAIN, raise_if_present=False) +CONFIG_SCHEMA = cv.config_entry_only_config_schema(DOMAIN) CALL_SCHEMA = vol.Schema({vol.Required(ATTR_ENTITY_ID): cv.comp_entity_ids}) diff --git a/homeassistant/components/webostv/icons.json b/homeassistant/components/webostv/icons.json index deb9729a99f..edc058d099f 100644 --- a/homeassistant/components/webostv/icons.json +++ b/homeassistant/components/webostv/icons.json @@ -1,7 +1,13 @@ { "services": { - "button": "mdi:button-pointer", - "command": "mdi:console", - "select_sound_output": "mdi:volume-source" + "button": { + "service": "mdi:button-pointer" + }, + "command": { + "service": "mdi:console" + }, + "select_sound_output": { + "service": "mdi:volume-source" + } } } diff --git a/homeassistant/components/websocket_api/commands.py b/homeassistant/components/websocket_api/commands.py index f66930c8d00..c9347012183 100644 --- a/homeassistant/components/websocket_api/commands.py +++ b/homeassistant/components/websocket_api/commands.py @@ -36,6 +36,10 @@ from homeassistant.exceptions import ( ) from homeassistant.helpers import config_validation as cv, entity, template from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.helpers.entityfilter import ( + INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA, + convert_include_exclude_filter, +) from homeassistant.helpers.event import ( TrackTemplate, TrackTemplateResult, @@ -366,14 +370,17 @@ def _send_handle_get_states_response( @callback def _forward_entity_changes( send_message: Callable[[str | bytes | dict[str, Any]], None], - entity_ids: set[str], + entity_ids: set[str] | None, + entity_filter: Callable[[str], bool] | None, user: User, message_id_as_bytes: bytes, event: Event[EventStateChangedData], ) -> None: """Forward entity state changed events to websocket.""" entity_id = event.data["entity_id"] - if entity_ids and entity_id not in entity_ids: + if (entity_ids and entity_id not in entity_ids) or ( + 
entity_filter and not entity_filter(entity_id) + ): return # We have to lookup the permissions again because the user might have # changed since the subscription was created. @@ -381,7 +388,7 @@ def _forward_entity_changes( if ( not user.is_admin and not permissions.access_all_entities(POLICY_READ) - and not permissions.check_entity(event.data["entity_id"], POLICY_READ) + and not permissions.check_entity(entity_id, POLICY_READ) ): return send_message(messages.cached_state_diff_message(message_id_as_bytes, event)) @@ -392,43 +399,55 @@ def _forward_entity_changes( { vol.Required("type"): "subscribe_entities", vol.Optional("entity_ids"): cv.entity_ids, + **INCLUDE_EXCLUDE_BASE_FILTER_SCHEMA.schema, } ) def handle_subscribe_entities( hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any] ) -> None: """Handle subscribe entities command.""" - entity_ids = set(msg.get("entity_ids", [])) + entity_ids = set(msg.get("entity_ids", [])) or None + _filter = convert_include_exclude_filter(msg) + entity_filter = None if _filter.empty_filter else _filter.get_filter() # We must never await between sending the states and listening for # state changed events or we will introduce a race condition # where some states are missed states = _async_get_allowed_states(hass, connection) - message_id_as_bytes = str(msg["id"]).encode() - connection.subscriptions[msg["id"]] = hass.bus.async_listen( + msg_id = msg["id"] + message_id_as_bytes = str(msg_id).encode() + connection.subscriptions[msg_id] = hass.bus.async_listen( EVENT_STATE_CHANGED, partial( _forward_entity_changes, connection.send_message, entity_ids, + entity_filter, connection.user, message_id_as_bytes, ), ) - connection.send_result(msg["id"]) + connection.send_result(msg_id) # JSON serialize here so we can recover if it blows up due to the # state machine containing unserializable data. This command is required # to succeed for the UI to show. 
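(Editor's aside, not part of the patch.) The forwarding condition added above combines two optional gates: an explicit entity_ids set and an include/exclude filter built from the subscribe message via convert_include_exclude_filter(msg). A minimal standalone model of that check follows; the helper names used inside the hunk are real, but the wrapper function here is invented purely for illustration:

    from collections.abc import Callable

    def _keep(
        entity_id: str,
        entity_ids: set[str] | None,
        entity_filter: Callable[[str], bool] | None,
    ) -> bool:
        """Mirror the gate used by _forward_entity_changes in the hunk above."""
        if entity_ids and entity_id not in entity_ids:
            return False  # an explicit entity_ids list restricts the stream
        if entity_filter and not entity_filter(entity_id):
            return False  # include/exclude filter from the subscribe message
        return True

    # With no entity_ids and a filter that keeps only the light domain:
    assert _keep("light.kitchen", None, lambda eid: eid.startswith("light."))
    assert not _keep("sensor.porch", None, lambda eid: eid.startswith("light."))

When neither gate is set, every state change is forwarded, which is why the initial-state serialization below keeps a fast path that skips filtering entirely.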
try: - serialized_states = [ - state.as_compressed_state_json - for state in states - if not entity_ids or state.entity_id in entity_ids - ] + if entity_ids or entity_filter: + serialized_states = [ + state.as_compressed_state_json + for state in states + if (not entity_ids or state.entity_id in entity_ids) + and (not entity_filter or entity_filter(state.entity_id)) + ] + else: + # Fast path when not filtering + serialized_states = [state.as_compressed_state_json for state in states] except (ValueError, TypeError): pass else: - _send_handle_entities_init_response(connection, msg["id"], serialized_states) + _send_handle_entities_init_response( + connection, message_id_as_bytes, serialized_states + ) return serialized_states = [] @@ -443,18 +462,22 @@ def handle_subscribe_entities( ), ) - _send_handle_entities_init_response(connection, msg["id"], serialized_states) + _send_handle_entities_init_response( + connection, message_id_as_bytes, serialized_states + ) def _send_handle_entities_init_response( - connection: ActiveConnection, msg_id: int, serialized_states: list[bytes] + connection: ActiveConnection, + message_id_as_bytes: bytes, + serialized_states: list[bytes], ) -> None: """Send handle entities init response.""" connection.send_message( b"".join( ( b'{"id":', - str(msg_id).encode(), + message_id_as_bytes, b',"type":"event","event":{"a":{', b",".join(serialized_states), b"}}}", diff --git a/homeassistant/components/websocket_api/connection.py b/homeassistant/components/websocket_api/connection.py index ef70df4a123..6c0c6f0c587 100644 --- a/homeassistant/components/websocket_api/connection.py +++ b/homeassistant/components/websocket_api/connection.py @@ -223,7 +223,7 @@ class ActiveConnection: try: if schema is False: if len(msg) > 2: - raise vol.Invalid("extra keys not allowed") + raise vol.Invalid("extra keys not allowed") # noqa: TRY301 handler(self.hass, self, msg) else: handler(self.hass, self, schema(msg)) diff --git a/homeassistant/components/websocket_api/http.py b/homeassistant/components/websocket_api/http.py index c65c4c65988..1ad8d909ce8 100644 --- a/homeassistant/components/websocket_api/http.py +++ b/homeassistant/components/websocket_api/http.py @@ -11,6 +11,7 @@ import logging from typing import TYPE_CHECKING, Any, Final from aiohttp import WSMsgType, web +from aiohttp.http_websocket import WebSocketWriter from homeassistant.components.http import KEY_HASS, HomeAssistantView from homeassistant.const import EVENT_HOMEASSISTANT_STOP @@ -124,7 +125,9 @@ class WebSocketHandler: return "finished connection" async def _writer( - self, send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]] + self, + connection: ActiveConnection, + send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], ) -> None: """Write outgoing messages.""" # Variables are set locally to avoid lookups in the loop @@ -134,7 +137,7 @@ class WebSocketHandler: loop = self._loop is_debug_log_enabled = partial(logger.isEnabledFor, logging.DEBUG) debug = logger.debug - can_coalesce = self._connection and self._connection.can_coalesce + can_coalesce = connection.can_coalesce ready_message_count = len(message_queue) # Exceptions if Socket disconnected or cancelled by connection handler try: @@ -148,7 +151,7 @@ class WebSocketHandler: if not can_coalesce: # coalesce may be enabled later in the connection - can_coalesce = self._connection and self._connection.can_coalesce + can_coalesce = connection.can_coalesce if not can_coalesce or ready_message_count == 1: message = message_queue.popleft() @@ -298,19 
+301,23 @@ class WebSocketHandler: request = self._request wsock = self._wsock logger = self._logger - debug = logger.debug hass = self._hass - is_enabled_for = logger.isEnabledFor - logging_debug = logging.DEBUG try: async with asyncio.timeout(10): await wsock.prepare(request) + except ConnectionResetError: + # Likely the client disconnected before we prepared the websocket + logger.debug( + "%s: Connection reset by peer while preparing WebSocket", + self.description, + ) + return wsock except TimeoutError: - self._logger.warning("Timeout preparing request from %s", request.remote) + logger.warning("Timeout preparing request from %s", request.remote) return wsock - debug("%s: Connected from %s", self.description, request.remote) + logger.debug("%s: Connected from %s", self.description, request.remote) self._handle_task = asyncio.current_task() unsub_stop = hass.bus.async_listen( @@ -325,134 +332,25 @@ class WebSocketHandler: auth = AuthPhase( logger, hass, self._send_message, self._cancel, request, send_bytes_text ) - connection = None - disconnect_warn = None + connection: ActiveConnection | None = None + disconnect_warn: str | None = None try: - await send_bytes_text(AUTH_REQUIRED_MESSAGE) - - # Auth Phase - try: - msg = await wsock.receive(10) - except TimeoutError as err: - disconnect_warn = "Did not receive auth message within 10 seconds" - raise Disconnect from err - - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): - raise Disconnect - - if msg.type != WSMsgType.TEXT: - disconnect_warn = "Received non-Text message." - raise Disconnect - - try: - auth_msg_data = json_loads(msg.data) - except ValueError as err: - disconnect_warn = "Received invalid JSON." - raise Disconnect from err - - if is_enabled_for(logging_debug): - debug("%s: Received %s", self.description, auth_msg_data) - connection = await auth.async_handle(auth_msg_data) - # As the webserver is now started before the start - # event we do not want to block for websocket responses - # - # We only start the writer queue after the auth phase is completed - # since there is no need to queue messages before the auth phase - self._connection = connection - self._writer_task = create_eager_task(self._writer(send_bytes_text)) - hass.data[DATA_CONNECTIONS] = hass.data.get(DATA_CONNECTIONS, 0) + 1 - async_dispatcher_send(hass, SIGNAL_WEBSOCKET_CONNECTED) - - self._authenticated = True - # - # - # Our websocket implementation is backed by a deque - # - # As back-pressure builds, the queue will back up and use more memory - # until we disconnect the client when the queue size reaches - # MAX_PENDING_MSG. When we are generating a high volume of websocket messages, - # we hit a bottleneck in aiohttp where it will wait for - # the buffer to drain before sending the next message and messages - # start backing up in the queue. - # - # https://github.com/aio-libs/aiohttp/issues/1367 added drains - # to the websocket writer to handle malicious clients and network issues. - # The drain causes multiple problems for us since the buffer cannot be - # drained fast enough when we deliver a high volume or large messages: - # - # - We end up disconnecting the client. The client will then reconnect, - # and the cycle repeats itself, which results in a significant amount of - # CPU usage. - # - # - Messages latency increases because messages cannot be moved into - # the TCP buffer because it is blocked waiting for the drain to happen because - # of the low default limit of 16KiB. 
By increasing the limit, we instead - # rely on the underlying TCP buffer and stack to deliver the messages which - # can typically happen much faster. - # - # After the auth phase is completed, and we are not concerned about - # the user being a malicious client, we set the limit to force a drain - # to 1MiB. 1MiB is the maximum expected size of the serialized entity - # registry, which is the largest message we usually send. - # - # https://github.com/aio-libs/aiohttp/commit/b3c80ee3f7d5d8f0b8bc27afe52e4d46621eaf99 - # added a way to set the limit, but there is no way to actually - # reach the code to set the limit, so we have to set it directly. - # - writer._limit = 2**20 # noqa: SLF001 - async_handle_str = connection.async_handle - async_handle_binary = connection.async_handle_binary - - # Command phase - while not wsock.closed: - msg = await wsock.receive() - - if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): - break - - if msg.type is WSMsgType.BINARY: - if len(msg.data) < 1: - disconnect_warn = "Received invalid binary message." - break - handler = msg.data[0] - payload = msg.data[1:] - async_handle_binary(handler, payload) - continue - - if msg.type is not WSMsgType.TEXT: - disconnect_warn = "Received non-Text message." - break - - try: - command_msg_data = json_loads(msg.data) - except ValueError: - disconnect_warn = "Received invalid JSON." - break - - if is_enabled_for(logging_debug): - debug("%s: Received %s", self.description, command_msg_data) - - # command_msg_data is always deserialized from JSON as a list - if type(command_msg_data) is not list: # noqa: E721 - async_handle_str(command_msg_data) - continue - - for split_msg in command_msg_data: - async_handle_str(split_msg) - + connection = await self._async_handle_auth_phase(auth, send_bytes_text) + self._async_increase_writer_limit(writer) + await self._async_websocket_command_phase(connection, send_bytes_text) except asyncio.CancelledError: - debug("%s: Connection cancelled", self.description) + logger.debug("%s: Connection cancelled", self.description) raise - except Disconnect as ex: - debug("%s: Connection closed by client: %s", self.description, ex) + if disconnect_msg := str(ex): + disconnect_warn = disconnect_msg + logger.debug("%s: Connection closed by client: %s", self.description, ex) except Exception: - self._logger.exception( + logger.exception( "%s: Unexpected error inside websocket API", self.description ) - finally: unsub_stop() @@ -465,38 +363,175 @@ class WebSocketHandler: if self._ready_future and not self._ready_future.done(): self._ready_future.set_result(len(self._message_queue)) - # If the writer gets canceled we still need to close the websocket - # so we have another finally block to make sure we close the websocket - # if the writer gets canceled. 
- try: - if self._writer_task: - await self._writer_task - finally: - try: - # Make sure all error messages are written before closing - await wsock.close() - finally: - if disconnect_warn is None: - debug("%s: Disconnected", self.description) - else: - self._logger.warning( - "%s: Disconnected: %s", self.description, disconnect_warn - ) - - if connection is not None: - hass.data[DATA_CONNECTIONS] -= 1 - self._connection = None - - async_dispatcher_send(hass, SIGNAL_WEBSOCKET_DISCONNECTED) - - # Break reference cycles to make sure GC can happen sooner - self._wsock = None # type: ignore[assignment] - self._request = None # type: ignore[assignment] - self._hass = None # type: ignore[assignment] - self._logger = None # type: ignore[assignment] - self._message_queue = None # type: ignore[assignment] - self._handle_task = None - self._writer_task = None - self._ready_future = None + await self._async_cleanup_writer_and_close(disconnect_warn, connection) return wsock + + async def _async_handle_auth_phase( + self, + auth: AuthPhase, + send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], + ) -> ActiveConnection: + """Handle the auth phase of the websocket connection.""" + await send_bytes_text(AUTH_REQUIRED_MESSAGE) + + # Auth Phase + try: + msg = await self._wsock.receive(10) + except TimeoutError as err: + raise Disconnect("Did not receive auth message within 10 seconds") from err + + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): + raise Disconnect("Received close message during auth phase") + + if msg.type is not WSMsgType.TEXT: + raise Disconnect("Received non-Text message during auth phase") + + try: + auth_msg_data = json_loads(msg.data) + except ValueError as err: + raise Disconnect("Received invalid JSON during auth phase") from err + + if self._logger.isEnabledFor(logging.DEBUG): + self._logger.debug("%s: Received %s", self.description, auth_msg_data) + connection = await auth.async_handle(auth_msg_data) + # As the webserver is now started before the start + # event we do not want to block for websocket responses + # + # We only start the writer queue after the auth phase is completed + # since there is no need to queue messages before the auth phase + self._connection = connection + self._writer_task = create_eager_task(self._writer(connection, send_bytes_text)) + self._hass.data[DATA_CONNECTIONS] = self._hass.data.get(DATA_CONNECTIONS, 0) + 1 + async_dispatcher_send(self._hass, SIGNAL_WEBSOCKET_CONNECTED) + + self._authenticated = True + return connection + + @callback + def _async_increase_writer_limit(self, writer: WebSocketWriter) -> None: + # + # + # Our websocket implementation is backed by a deque + # + # As back-pressure builds, the queue will back up and use more memory + # until we disconnect the client when the queue size reaches + # MAX_PENDING_MSG. When we are generating a high volume of websocket messages, + # we hit a bottleneck in aiohttp where it will wait for + # the buffer to drain before sending the next message and messages + # start backing up in the queue. + # + # https://github.com/aio-libs/aiohttp/issues/1367 added drains + # to the websocket writer to handle malicious clients and network issues. + # The drain causes multiple problems for us since the buffer cannot be + # drained fast enough when we deliver a high volume or large messages: + # + # - We end up disconnecting the client. The client will then reconnect, + # and the cycle repeats itself, which results in a significant amount of + # CPU usage. 
+ # + # - Messages latency increases because messages cannot be moved into + # the TCP buffer because it is blocked waiting for the drain to happen because + # of the low default limit of 16KiB. By increasing the limit, we instead + # rely on the underlying TCP buffer and stack to deliver the messages which + # can typically happen much faster. + # + # After the auth phase is completed, and we are not concerned about + # the user being a malicious client, we set the limit to force a drain + # to 1MiB. 1MiB is the maximum expected size of the serialized entity + # registry, which is the largest message we usually send. + # + # https://github.com/aio-libs/aiohttp/commit/b3c80ee3f7d5d8f0b8bc27afe52e4d46621eaf99 + # added a way to set the limit, but there is no way to actually + # reach the code to set the limit, so we have to set it directly. + # + writer._limit = 2**20 # noqa: SLF001 + + async def _async_websocket_command_phase( + self, + connection: ActiveConnection, + send_bytes_text: Callable[[bytes], Coroutine[Any, Any, None]], + ) -> None: + """Handle the command phase of the websocket connection.""" + wsock = self._wsock + async_handle_str = connection.async_handle + async_handle_binary = connection.async_handle_binary + _debug_enabled = partial(self._logger.isEnabledFor, logging.DEBUG) + + # Command phase + while not wsock.closed: + msg = await wsock.receive() + + if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSED, WSMsgType.CLOSING): + break + + if msg.type is WSMsgType.BINARY: + if len(msg.data) < 1: + raise Disconnect("Received invalid binary message.") + + handler = msg.data[0] + payload = msg.data[1:] + async_handle_binary(handler, payload) + continue + + if msg.type is not WSMsgType.TEXT: + raise Disconnect("Received non-Text message.") + + try: + command_msg_data = json_loads(msg.data) + except ValueError as ex: + raise Disconnect("Received invalid JSON.") from ex + + if _debug_enabled(): + self._logger.debug( + "%s: Received %s", self.description, command_msg_data + ) + + # command_msg_data is always deserialized from JSON as a list + if type(command_msg_data) is not list: # noqa: E721 + async_handle_str(command_msg_data) + continue + + for split_msg in command_msg_data: + async_handle_str(split_msg) + + async def _async_cleanup_writer_and_close( + self, disconnect_warn: str | None, connection: ActiveConnection | None + ) -> None: + """Cleanup the writer and close the websocket.""" + # If the writer gets canceled we still need to close the websocket + # so we have another finally block to make sure we close the websocket + # if the writer gets canceled. 
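(Editor's aside.) The cleanup helper that starts here keeps the old nested try/finally shape: await the writer task, then close the socket in a finally so the close happens even if that await raises or is cancelled. A stripped-down sketch of the same shape, using illustrative names rather than the handler's attributes:

    import asyncio

    from aiohttp import web

    async def close_after_writer(
        writer_task: asyncio.Task | None, wsock: web.WebSocketResponse
    ) -> None:
        """Drain the writer if one was started, then always close the websocket."""
        try:
            if writer_task:
                await writer_task  # may raise or be cancelled
        finally:
            await wsock.close()  # runs regardless, so the socket never leaks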
+ wsock = self._wsock + hass = self._hass + logger = self._logger + try: + if self._writer_task: + await self._writer_task + finally: + try: + # Make sure all error messages are written before closing + await wsock.close() + finally: + if disconnect_warn is None: + logger.debug("%s: Disconnected", self.description) + else: + logger.warning( + "%s: Disconnected: %s", self.description, disconnect_warn + ) + + if connection is not None: + hass.data[DATA_CONNECTIONS] -= 1 + self._connection = None + + async_dispatcher_send(hass, SIGNAL_WEBSOCKET_DISCONNECTED) + + # Break reference cycles to make sure GC can happen sooner + self._wsock = None # type: ignore[assignment] + self._request = None # type: ignore[assignment] + self._hass = None # type: ignore[assignment] + self._logger = None # type: ignore[assignment] + self._message_queue = None # type: ignore[assignment] + self._handle_task = None + self._writer_task = None + self._ready_future = None diff --git a/homeassistant/components/websocket_api/messages.py b/homeassistant/components/websocket_api/messages.py index 238f8be0c3b..0a8200c5700 100644 --- a/homeassistant/components/websocket_api/messages.py +++ b/homeassistant/components/websocket_api/messages.py @@ -224,9 +224,12 @@ def _state_diff_event( if (old_attributes := old_state.attributes) != ( new_attributes := new_state.attributes ): - for key, value in new_attributes.items(): - if old_attributes.get(key) != value: - additions.setdefault(COMPRESSED_STATE_ATTRIBUTES, {})[key] = value + if added := { + key: value + for key, value in new_attributes.items() + if key not in old_attributes or old_attributes[key] != value + }: + additions[COMPRESSED_STATE_ATTRIBUTES] = added if removed := old_attributes.keys() - new_attributes: # sets are not JSON serializable by default so we convert to list # here if there are any values to avoid jumping into the json_encoder_default diff --git a/homeassistant/components/wemo/fan.py b/homeassistant/components/wemo/fan.py index b7c9840bcdc..f9d3270aaa0 100644 --- a/homeassistant/components/wemo/fan.py +++ b/homeassistant/components/wemo/fan.py @@ -67,7 +67,7 @@ async def async_setup_entry( # This will call WemoHumidifier.reset_filter_life() platform.async_register_entity_service( - SERVICE_RESET_FILTER_LIFE, {}, WemoHumidifier.reset_filter_life.__name__ + SERVICE_RESET_FILTER_LIFE, None, WemoHumidifier.reset_filter_life.__name__ ) diff --git a/homeassistant/components/wemo/icons.json b/homeassistant/components/wemo/icons.json index c5ddf5912d6..af5024afcff 100644 --- a/homeassistant/components/wemo/icons.json +++ b/homeassistant/components/wemo/icons.json @@ -1,6 +1,10 @@ { "services": { - "set_humidity": "mdi:water-percent", - "reset_filter_life": "mdi:refresh" + "set_humidity": { + "service": "mdi:water-percent" + }, + "reset_filter_life": { + "service": "mdi:refresh" + } } } diff --git a/homeassistant/components/wiffi/config_flow.py b/homeassistant/components/wiffi/config_flow.py index 17262dd0276..6e4872ea400 100644 --- a/homeassistant/components/wiffi/config_flow.py +++ b/homeassistant/components/wiffi/config_flow.py @@ -6,11 +6,17 @@ Used by UI to setup a wiffi integration. 
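(Editor's aside.) The wiffi hunk below follows the same recipe applied to volumio, vulcan, wolflink, ws66i, xbox and xiaomi_aqara elsewhere in this changeset: add the ConfigFlowResult import where it was missing, and give async_step_user an explicit dict[str, Any] | None parameter plus a ConfigFlowResult return type. Condensed into one illustrative flow (the "example" domain and the body are placeholders, not taken from any of these files):

    from typing import Any

    from homeassistant.config_entries import ConfigFlow, ConfigFlowResult

    class ExampleConfigFlow(ConfigFlow, domain="example"):
        """Sketch of the typing pattern these flows now share."""

        VERSION = 1

        async def async_step_user(
            self, user_input: dict[str, Any] | None = None
        ) -> ConfigFlowResult:
            """Handle the initial step."""
            errors: dict[str, str] = {}
            if user_input is not None:
                # real flows validate the input here before creating the entry
                return self.async_create_entry(title="Example", data=user_input)
            return self.async_show_form(step_id="user", errors=errors)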
from __future__ import annotations import errno +from typing import Any import voluptuous as vol from wiffi import WiffiTcpServer -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_PORT, CONF_TIMEOUT from homeassistant.core import callback @@ -30,7 +36,9 @@ class WiffiFlowHandler(ConfigFlow, domain=DOMAIN): """Create Wiffi server setup option flow.""" return OptionsFlowHandler(config_entry) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the start of the config flow. Called after wiffi integration has been selected in the 'add integration diff --git a/homeassistant/components/wilight/config_flow.py b/homeassistant/components/wilight/config_flow.py index 52b3b426c20..babc011fc35 100644 --- a/homeassistant/components/wilight/config_flow.py +++ b/homeassistant/components/wilight/config_flow.py @@ -24,13 +24,13 @@ class WiLightFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize the WiLight flow.""" self._host = None self._serial_number = None self._title = None self._model_name = None - self._wilight_components = [] + self._wilight_components: list[str] = [] self._components_text = "" def _wilight_update(self, host, serial_number, model_name): diff --git a/homeassistant/components/wilight/icons.json b/homeassistant/components/wilight/icons.json index 3c5d0112de1..48bcae2a301 100644 --- a/homeassistant/components/wilight/icons.json +++ b/homeassistant/components/wilight/icons.json @@ -10,8 +10,14 @@ } }, "services": { - "set_watering_time": "mdi:timer", - "set_pause_time": "mdi:timer-pause", - "set_trigger": "mdi:gesture-tap-button" + "set_watering_time": { + "service": "mdi:timer" + }, + "set_pause_time": { + "service": "mdi:timer-pause" + }, + "set_trigger": { + "service": "mdi:gesture-tap-button" + } } } diff --git a/homeassistant/components/withings/manifest.json b/homeassistant/components/withings/manifest.json index 090f8c4588e..a7f632325a0 100644 --- a/homeassistant/components/withings/manifest.json +++ b/homeassistant/components/withings/manifest.json @@ -9,5 +9,5 @@ "iot_class": "cloud_push", "loggers": ["aiowithings"], "quality_scale": "platinum", - "requirements": ["aiowithings==3.0.2"] + "requirements": ["aiowithings==3.0.3"] } diff --git a/homeassistant/components/wolflink/config_flow.py b/homeassistant/components/wolflink/config_flow.py index 6e218bfd1ce..a2678580a23 100644 --- a/homeassistant/components/wolflink/config_flow.py +++ b/homeassistant/components/wolflink/config_flow.py @@ -1,13 +1,14 @@ """Config flow for Wolf SmartSet Service integration.""" import logging +from typing import Any from httpcore import ConnectError import voluptuous as vol from wolf_comm.token_auth import InvalidAuth from wolf_comm.wolf_client import WolfClient -from homeassistant.config_entries import ConfigFlow +from homeassistant.config_entries import ConfigFlow, ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from .const import DEVICE_GATEWAY, DEVICE_ID, DEVICE_NAME, DOMAIN @@ -30,7 +31,9 @@ class WolfLinkConfigFlow(ConfigFlow, domain=DOMAIN): self.password = None self.fetched_systems = None - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = 
None + ) -> ConfigFlowResult: """Handle the initial step to get connection parameters.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/workday/icons.json b/homeassistant/components/workday/icons.json index 10d3c93a288..ec5c64dce97 100644 --- a/homeassistant/components/workday/icons.json +++ b/homeassistant/components/workday/icons.json @@ -1,5 +1,7 @@ { "services": { - "check_date": "mdi:calendar-check" + "check_date": { + "service": "mdi:calendar-check" + } } } diff --git a/homeassistant/components/workday/manifest.json b/homeassistant/components/workday/manifest.json index fafa870d00a..297b20b8c0e 100644 --- a/homeassistant/components/workday/manifest.json +++ b/homeassistant/components/workday/manifest.json @@ -7,5 +7,5 @@ "iot_class": "local_polling", "loggers": ["holidays"], "quality_scale": "internal", - "requirements": ["holidays==0.55"] + "requirements": ["holidays==0.56"] } diff --git a/homeassistant/components/ws66i/config_flow.py b/homeassistant/components/ws66i/config_flow.py index b0cf6717e4d..330e9963f95 100644 --- a/homeassistant/components/ws66i/config_flow.py +++ b/homeassistant/components/ws66i/config_flow.py @@ -8,7 +8,12 @@ from typing import Any from pyws66i import WS66i, get_ws66i import voluptuous as vol -from homeassistant.config_entries import ConfigEntry, ConfigFlow, OptionsFlow +from homeassistant.config_entries import ( + ConfigEntry, + ConfigFlow, + ConfigFlowResult, + OptionsFlow, +) from homeassistant.const import CONF_IP_ADDRESS from homeassistant.core import HomeAssistant, callback from homeassistant.exceptions import HomeAssistantError @@ -94,7 +99,9 @@ class WS66iConfigFlow(ConfigFlow, domain=DOMAIN): VERSION = 1 - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: diff --git a/homeassistant/components/wyoming/data.py b/homeassistant/components/wyoming/data.py index e333a740741..1ee0f24f805 100644 --- a/homeassistant/components/wyoming/data.py +++ b/homeassistant/components/wyoming/data.py @@ -100,7 +100,7 @@ async def load_wyoming_info( while True: event = await client.read_event() if event is None: - raise WyomingError( + raise WyomingError( # noqa: TRY301 "Connection closed unexpectedly", ) diff --git a/homeassistant/components/xbox/config_flow.py b/homeassistant/components/xbox/config_flow.py index e1434aac67c..86157be5d7f 100644 --- a/homeassistant/components/xbox/config_flow.py +++ b/homeassistant/components/xbox/config_flow.py @@ -1,7 +1,9 @@ """Config flow for xbox.""" import logging +from typing import Any +from homeassistant.config_entries import ConfigFlowResult from homeassistant.helpers import config_entry_oauth2_flow from .const import DOMAIN @@ -25,7 +27,9 @@ class OAuth2FlowHandler( scopes = ["Xboxlive.signin", "Xboxlive.offline_access"] return {"scope": " ".join(scopes)} - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow start.""" await self.async_set_unique_id(DOMAIN) diff --git a/homeassistant/components/xiaomi/camera.py b/homeassistant/components/xiaomi/camera.py index 323a0f8a157..8ab15f85147 100644 --- a/homeassistant/components/xiaomi/camera.py +++ b/homeassistant/components/xiaomi/camera.py @@ -80,7 +80,6 @@ class XiaomiCamera(Camera): self._manager = get_ffmpeg_manager(hass) self._name = config[CONF_NAME] 
self.host = config[CONF_HOST] - self.host.hass = hass self._model = config[CONF_MODEL] self.port = config[CONF_PORT] self.path = config[CONF_PATH] diff --git a/homeassistant/components/xiaomi_aqara/config_flow.py b/homeassistant/components/xiaomi_aqara/config_flow.py index 8f391c8ddf3..a89bb8447a3 100644 --- a/homeassistant/components/xiaomi_aqara/config_flow.py +++ b/homeassistant/components/xiaomi_aqara/config_flow.py @@ -2,6 +2,7 @@ import logging from socket import gaierror +from typing import TYPE_CHECKING, Any import voluptuous as vol from xiaomi_gateway import MULTICAST_PORT, XiaomiGateway, XiaomiGatewayDiscovery @@ -49,13 +50,13 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): VERSION = 1 - def __init__(self): + def __init__(self) -> None: """Initialize.""" - self.host = None + self.host: str | None = None self.interface = DEFAULT_INTERFACE - self.sid = None - self.gateways = None - self.selected_gateway = None + self.sid: str | None = None + self.gateways: dict[str, XiaomiGateway] | None = None + self.selected_gateway: XiaomiGateway | None = None @callback def async_show_form_step_user(self, errors): @@ -66,9 +67,11 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): return self.async_show_form(step_id="user", data_schema=schema, errors=errors) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle a flow initialized by the user.""" - errors = {} + errors: dict[str, str] = {} if user_input is None: return self.async_show_form_step_user(errors) @@ -96,6 +99,8 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): None, ) + if TYPE_CHECKING: + assert self.selected_gateway if self.selected_gateway.connection_error: errors[CONF_HOST] = "invalid_host" if self.selected_gateway.mac_error: @@ -115,6 +120,8 @@ class XiaomiAqaraFlowHandler(ConfigFlow, domain=DOMAIN): self.gateways = xiaomi.gateways + if TYPE_CHECKING: + assert self.gateways is not None if len(self.gateways) == 1: self.selected_gateway = list(self.gateways.values())[0] self.sid = self.selected_gateway.sid diff --git a/homeassistant/components/xiaomi_aqara/icons.json b/homeassistant/components/xiaomi_aqara/icons.json index 4975414833d..62149b0dd40 100644 --- a/homeassistant/components/xiaomi_aqara/icons.json +++ b/homeassistant/components/xiaomi_aqara/icons.json @@ -1,8 +1,16 @@ { "services": { - "add_device": "mdi:cellphone-link", - "play_ringtone": "mdi:music", - "remove_device": "mdi:cellphone-link", - "stop_ringtone": "mdi:music-off" + "add_device": { + "service": "mdi:cellphone-link" + }, + "play_ringtone": { + "service": "mdi:music" + }, + "remove_device": { + "service": "mdi:cellphone-link" + }, + "stop_ringtone": { + "service": "mdi:music-off" + } } } diff --git a/homeassistant/components/xiaomi_miio/icons.json b/homeassistant/components/xiaomi_miio/icons.json index 2e5084a1f6c..cc0800f1d9d 100644 --- a/homeassistant/components/xiaomi_miio/icons.json +++ b/homeassistant/components/xiaomi_miio/icons.json @@ -14,29 +14,77 @@ } }, "services": { - "fan_reset_filter": "mdi:refresh", - "fan_set_extra_features": "mdi:cog", - "light_set_scene": "mdi:palette", - "light_set_delayed_turn_off": "mdi:timer", - "light_reminder_on": "mdi:alarm", - "light_reminder_off": "mdi:alarm-off", - "light_night_light_mode_on": "mdi:weather-night", - "light_night_light_mode_off": "mdi:weather-sunny", - "light_eyecare_mode_on": "mdi:eye", - "light_eyecare_mode_off": "mdi:eye-off", - "remote_learn_command": 
"mdi:remote", - "remote_set_led_on": "mdi:led-on", - "remote_set_led_off": "mdi:led-off", - "switch_set_wifi_led_on": "mdi:wifi", - "switch_set_wifi_led_off": "mdi:wifi-off", - "switch_set_power_price": "mdi:currency-usd", - "switch_set_power_mode": "mdi:power", - "vacuum_remote_control_start": "mdi:play", - "vacuum_remote_control_stop": "mdi:stop", - "vacuum_remote_control_move": "mdi:remote", - "vacuum_remote_control_move_step": "mdi:remote", - "vacuum_clean_zone": "mdi:map-marker", - "vacuum_goto": "mdi:map-marker", - "vacuum_clean_segment": "mdi:map-marker" + "fan_reset_filter": { + "service": "mdi:refresh" + }, + "fan_set_extra_features": { + "service": "mdi:cog" + }, + "light_set_scene": { + "service": "mdi:palette" + }, + "light_set_delayed_turn_off": { + "service": "mdi:timer" + }, + "light_reminder_on": { + "service": "mdi:alarm" + }, + "light_reminder_off": { + "service": "mdi:alarm-off" + }, + "light_night_light_mode_on": { + "service": "mdi:weather-night" + }, + "light_night_light_mode_off": { + "service": "mdi:weather-sunny" + }, + "light_eyecare_mode_on": { + "service": "mdi:eye" + }, + "light_eyecare_mode_off": { + "service": "mdi:eye-off" + }, + "remote_learn_command": { + "service": "mdi:remote" + }, + "remote_set_led_on": { + "service": "mdi:led-on" + }, + "remote_set_led_off": { + "service": "mdi:led-off" + }, + "switch_set_wifi_led_on": { + "service": "mdi:wifi" + }, + "switch_set_wifi_led_off": { + "service": "mdi:wifi-off" + }, + "switch_set_power_price": { + "service": "mdi:currency-usd" + }, + "switch_set_power_mode": { + "service": "mdi:power" + }, + "vacuum_remote_control_start": { + "service": "mdi:play" + }, + "vacuum_remote_control_stop": { + "service": "mdi:stop" + }, + "vacuum_remote_control_move": { + "service": "mdi:remote" + }, + "vacuum_remote_control_move_step": { + "service": "mdi:remote" + }, + "vacuum_clean_zone": { + "service": "mdi:map-marker" + }, + "vacuum_goto": { + "service": "mdi:map-marker" + }, + "vacuum_clean_segment": { + "service": "mdi:map-marker" + } } } diff --git a/homeassistant/components/xiaomi_miio/remote.py b/homeassistant/components/xiaomi_miio/remote.py index 959bf0a7bee..72707109ad6 100644 --- a/homeassistant/components/xiaomi_miio/remote.py +++ b/homeassistant/components/xiaomi_miio/remote.py @@ -170,12 +170,12 @@ async def async_setup_platform( ) platform.async_register_entity_service( SERVICE_SET_REMOTE_LED_ON, - {}, + None, async_service_led_on_handler, ) platform.async_register_entity_service( SERVICE_SET_REMOTE_LED_OFF, - {}, + None, async_service_led_off_handler, ) diff --git a/homeassistant/components/xiaomi_miio/vacuum.py b/homeassistant/components/xiaomi_miio/vacuum.py index ef6f94c162f..ac833f7646c 100644 --- a/homeassistant/components/xiaomi_miio/vacuum.py +++ b/homeassistant/components/xiaomi_miio/vacuum.py @@ -104,13 +104,13 @@ async def async_setup_entry( platform.async_register_entity_service( SERVICE_START_REMOTE_CONTROL, - {}, + None, MiroboVacuum.async_remote_control_start.__name__, ) platform.async_register_entity_service( SERVICE_STOP_REMOTE_CONTROL, - {}, + None, MiroboVacuum.async_remote_control_stop.__name__, ) diff --git a/homeassistant/components/yale/__init__.py b/homeassistant/components/yale/__init__.py new file mode 100644 index 00000000000..1cbd9c87b57 --- /dev/null +++ b/homeassistant/components/yale/__init__.py @@ -0,0 +1,81 @@ +"""Support for Yale devices.""" + +from __future__ import annotations + +from pathlib import Path +from typing import cast + +from aiohttp import ClientResponseError 
+from yalexs.const import Brand +from yalexs.exceptions import YaleApiError +from yalexs.manager.const import CONF_BRAND +from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation +from yalexs.manager.gateway import Config as YaleXSConfig + +from homeassistant.config_entries import ConfigEntry +from homeassistant.const import EVENT_HOMEASSISTANT_STOP +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady +from homeassistant.helpers import config_entry_oauth2_flow, device_registry as dr + +from .const import DOMAIN, PLATFORMS +from .data import YaleData +from .gateway import YaleGateway +from .util import async_create_yale_clientsession + +type YaleConfigEntry = ConfigEntry[YaleData] + + +async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: + """Set up yale from a config entry.""" + session = async_create_yale_clientsession(hass) + implementation = ( + await config_entry_oauth2_flow.async_get_config_entry_implementation( + hass, entry + ) + ) + oauth_session = config_entry_oauth2_flow.OAuth2Session(hass, entry, implementation) + yale_gateway = YaleGateway(Path(hass.config.config_dir), session, oauth_session) + try: + await async_setup_yale(hass, entry, yale_gateway) + except (RequireValidation, InvalidAuth) as err: + raise ConfigEntryAuthFailed from err + except TimeoutError as err: + raise ConfigEntryNotReady("Timed out connecting to yale api") from err + except (YaleApiError, ClientResponseError, CannotConnect) as err: + raise ConfigEntryNotReady from err + await hass.config_entries.async_forward_entry_setups(entry, PLATFORMS) + return True + + +async def async_unload_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool: + """Unload a config entry.""" + return await hass.config_entries.async_unload_platforms(entry, PLATFORMS) + + +async def async_setup_yale( + hass: HomeAssistant, entry: YaleConfigEntry, yale_gateway: YaleGateway +) -> None: + """Set up the yale component.""" + config = cast(YaleXSConfig, entry.data) + await yale_gateway.async_setup({**config, CONF_BRAND: Brand.YALE_GLOBAL}) + await yale_gateway.async_authenticate() + await yale_gateway.async_refresh_access_token_if_needed() + data = entry.runtime_data = YaleData(hass, yale_gateway) + entry.async_on_unload( + hass.bus.async_listen(EVENT_HOMEASSISTANT_STOP, data.async_stop) + ) + entry.async_on_unload(data.async_stop) + await data.async_setup() + + +async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: YaleConfigEntry, device_entry: dr.DeviceEntry +) -> bool: + """Remove yale config entry from a device if its no longer present.""" + return not any( + identifier + for identifier in device_entry.identifiers + if identifier[0] == DOMAIN + and config_entry.runtime_data.get_device(identifier[1]) + ) diff --git a/homeassistant/components/yale/application_credentials.py b/homeassistant/components/yale/application_credentials.py new file mode 100644 index 00000000000..31b5b7a92c7 --- /dev/null +++ b/homeassistant/components/yale/application_credentials.py @@ -0,0 +1,15 @@ +"""application_credentials platform the yale integration.""" + +from homeassistant.components.application_credentials import AuthorizationServer +from homeassistant.core import HomeAssistant + +OAUTH2_AUTHORIZE = "https://oauth.aaecosystem.com/authorize" +OAUTH2_TOKEN = "https://oauth.aaecosystem.com/access_token" + + +async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer: + 
"""Return authorization server.""" + return AuthorizationServer( + authorize_url=OAUTH2_AUTHORIZE, + token_url=OAUTH2_TOKEN, + ) diff --git a/homeassistant/components/yale/binary_sensor.py b/homeassistant/components/yale/binary_sensor.py new file mode 100644 index 00000000000..dbb00ad7d42 --- /dev/null +++ b/homeassistant/components/yale/binary_sensor.py @@ -0,0 +1,188 @@ +"""Support for Yale binary sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from datetime import datetime, timedelta +from functools import partial +import logging + +from yalexs.activity import Activity, ActivityType +from yalexs.doorbell import DoorbellDetail +from yalexs.lock import LockDetail, LockDoorStatus +from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL +from yalexs.util import update_lock_detail_from_activity + +from homeassistant.components.binary_sensor import ( + BinarySensorDeviceClass, + BinarySensorEntity, + BinarySensorEntityDescription, +) +from homeassistant.const import EntityCategory +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.event import async_call_later + +from . import YaleConfigEntry, YaleData +from .entity import YaleDescriptionEntity +from .util import ( + retrieve_ding_activity, + retrieve_doorbell_motion_activity, + retrieve_online_state, + retrieve_time_based_activity, +) + +_LOGGER = logging.getLogger(__name__) + +TIME_TO_RECHECK_DETECTION = timedelta( + seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds() * 3 +) + + +@dataclass(frozen=True, kw_only=True) +class YaleDoorbellBinarySensorEntityDescription(BinarySensorEntityDescription): + """Describes Yale binary_sensor entity.""" + + value_fn: Callable[[YaleData, DoorbellDetail | LockDetail], Activity | None] + is_time_based: bool + + +SENSOR_TYPE_DOOR = BinarySensorEntityDescription( + key="open", + device_class=BinarySensorDeviceClass.DOOR, +) + +SENSOR_TYPES_VIDEO_DOORBELL = ( + YaleDoorbellBinarySensorEntityDescription( + key="motion", + device_class=BinarySensorDeviceClass.MOTION, + value_fn=retrieve_doorbell_motion_activity, + is_time_based=True, + ), + YaleDoorbellBinarySensorEntityDescription( + key="image capture", + translation_key="image_capture", + value_fn=partial( + retrieve_time_based_activity, {ActivityType.DOORBELL_IMAGE_CAPTURE} + ), + is_time_based=True, + ), + YaleDoorbellBinarySensorEntityDescription( + key="online", + device_class=BinarySensorDeviceClass.CONNECTIVITY, + entity_category=EntityCategory.DIAGNOSTIC, + value_fn=retrieve_online_state, + is_time_based=False, + ), +) + + +SENSOR_TYPES_DOORBELL: tuple[YaleDoorbellBinarySensorEntityDescription, ...] 
= ( + YaleDoorbellBinarySensorEntityDescription( + key="ding", + translation_key="ding", + device_class=BinarySensorDeviceClass.OCCUPANCY, + value_fn=retrieve_ding_activity, + is_time_based=True, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Yale binary sensors.""" + data = config_entry.runtime_data + entities: list[BinarySensorEntity] = [] + + for lock in data.locks: + detail = data.get_device_detail(lock.device_id) + if detail.doorsense: + entities.append(YaleDoorBinarySensor(data, lock, SENSOR_TYPE_DOOR)) + + if detail.doorbell: + entities.extend( + YaleDoorbellBinarySensor(data, lock, description) + for description in SENSOR_TYPES_DOORBELL + ) + + entities.extend( + YaleDoorbellBinarySensor(data, doorbell, description) + for description in SENSOR_TYPES_DOORBELL + SENSOR_TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) + async_add_entities(entities) + + +class YaleDoorBinarySensor(YaleDescriptionEntity, BinarySensorEntity): + """Representation of an Yale Door binary sensor.""" + + _attr_device_class = BinarySensorDeviceClass.DOOR + description: BinarySensorEntityDescription + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor and update activity.""" + if door_activity := self._get_latest({ActivityType.DOOR_OPERATION}): + update_lock_detail_from_activity(self._detail, door_activity) + if door_activity.was_pushed: + self._detail.set_online(True) + + if bridge_activity := self._get_latest({ActivityType.BRIDGE_OPERATION}): + update_lock_detail_from_activity(self._detail, bridge_activity) + self._attr_available = self._detail.bridge_is_online + self._attr_is_on = self._detail.door_state == LockDoorStatus.OPEN + + +class YaleDoorbellBinarySensor(YaleDescriptionEntity, BinarySensorEntity): + """Representation of an Yale binary sensor.""" + + entity_description: YaleDoorbellBinarySensorEntityDescription + _check_for_off_update_listener: Callable[[], None] | None = None + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor.""" + self._cancel_any_pending_updates() + self._attr_is_on = bool( + self.entity_description.value_fn(self._data, self._detail) + ) + + if self.entity_description.is_time_based: + self._attr_available = retrieve_online_state(self._data, self._detail) + self._schedule_update_to_recheck_turn_off_sensor() + else: + self._attr_available = True + + @callback + def _async_scheduled_update(self, now: datetime) -> None: + """Timer callback for sensor update.""" + self._check_for_off_update_listener = None + self._update_from_data() + if not self.is_on: + self.async_write_ha_state() + + def _schedule_update_to_recheck_turn_off_sensor(self) -> None: + """Schedule an update to recheck the sensor to see if it is ready to turn off.""" + # If the sensor is already off there is nothing to do + if not self.is_on: + return + self._check_for_off_update_listener = async_call_later( + self.hass, TIME_TO_RECHECK_DETECTION, self._async_scheduled_update + ) + + def _cancel_any_pending_updates(self) -> None: + """Cancel any updates to recheck a sensor to see if it is ready to turn off.""" + if not self._check_for_off_update_listener: + return + _LOGGER.debug("%s: canceled pending update", self.entity_id) + self._check_for_off_update_listener() + self._check_for_off_update_listener = None + + async def async_will_remove_from_hass(self) -> None: + """When removing cancel any scheduled 
updates.""" + self._cancel_any_pending_updates() + await super().async_will_remove_from_hass() diff --git a/homeassistant/components/yale/button.py b/homeassistant/components/yale/button.py new file mode 100644 index 00000000000..b04ad638f0c --- /dev/null +++ b/homeassistant/components/yale/button.py @@ -0,0 +1,32 @@ +"""Support for Yale buttons.""" + +from homeassistant.components.button import ButtonEntity +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import YaleConfigEntry +from .entity import YaleEntity + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Yale lock wake buttons.""" + data = config_entry.runtime_data + async_add_entities(YaleWakeLockButton(data, lock, "wake") for lock in data.locks) + + +class YaleWakeLockButton(YaleEntity, ButtonEntity): + """Representation of an Yale lock wake button.""" + + _attr_translation_key = "wake" + + async def async_press(self) -> None: + """Wake the device.""" + await self._data.async_status_async(self._device_id, self._hyper_bridge) + + @callback + def _update_from_data(self) -> None: + """Nothing to update as buttons are stateless.""" diff --git a/homeassistant/components/yale/camera.py b/homeassistant/components/yale/camera.py new file mode 100644 index 00000000000..217e8f5f6fd --- /dev/null +++ b/homeassistant/components/yale/camera.py @@ -0,0 +1,90 @@ +"""Support for Yale doorbell camera.""" + +from __future__ import annotations + +import logging + +from aiohttp import ClientSession +from yalexs.activity import ActivityType +from yalexs.doorbell import Doorbell +from yalexs.util import update_doorbell_image_from_activity + +from homeassistant.components.camera import Camera +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import aiohttp_client +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import YaleConfigEntry, YaleData +from .const import DEFAULT_NAME, DEFAULT_TIMEOUT +from .entity import YaleEntity + +_LOGGER = logging.getLogger(__name__) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Yale cameras.""" + data = config_entry.runtime_data + # Create an aiohttp session instead of using the default one since the + # default one is likely to trigger yale's WAF if another integration + # is also using Cloudflare + session = aiohttp_client.async_create_clientsession(hass) + async_add_entities( + YaleCamera(data, doorbell, session, DEFAULT_TIMEOUT) + for doorbell in data.doorbells + ) + + +class YaleCamera(YaleEntity, Camera): + """An implementation of an Yale security camera.""" + + _attr_translation_key = "camera" + _attr_motion_detection_enabled = True + _attr_brand = DEFAULT_NAME + _image_url: str | None = None + _image_content: bytes | None = None + + def __init__( + self, data: YaleData, device: Doorbell, session: ClientSession, timeout: int + ) -> None: + """Initialize an Yale security camera.""" + super().__init__(data, device, "camera") + self._timeout = timeout + self._session = session + self._attr_model = self._detail.model + + @property + def is_recording(self) -> bool: + """Return true if the device is recording.""" + return self._device.has_subscription + + async def _async_update(self): + """Update device.""" + _LOGGER.debug("async_update called %s", self._detail.device_name) + await self._data.refresh_camera_by_id(self._device_id) + self._update_from_data() + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor.""" + if doorbell_activity := self._get_latest( + {ActivityType.DOORBELL_MOTION, ActivityType.DOORBELL_IMAGE_CAPTURE} + ): + update_doorbell_image_from_activity(self._detail, doorbell_activity) + + async def async_camera_image( + self, width: int | None = None, height: int | None = None + ) -> bytes | None: + """Return bytes of camera image.""" + self._update_from_data() + + if self._image_url is not self._detail.image_url: + self._image_content = await self._data.async_get_doorbell_image( + self._device_id, self._session, timeout=self._timeout + ) + self._image_url = self._detail.image_url + + return self._image_content diff --git a/homeassistant/components/yale/config_flow.py b/homeassistant/components/yale/config_flow.py new file mode 100644 index 00000000000..6cbc9543ea4 --- /dev/null +++ b/homeassistant/components/yale/config_flow.py @@ -0,0 +1,60 @@ +"""Config flow for Yale integration.""" + +from collections.abc import Mapping +import logging +from typing import Any + +import jwt + +from homeassistant.config_entries import ConfigEntry, ConfigFlowResult +from homeassistant.helpers import config_entry_oauth2_flow + +from .const import DOMAIN + +_LOGGER = logging.getLogger(__name__) + + +class YaleConfigFlow(config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN): + """Handle a config flow for Yale.""" + + VERSION = 1 + DOMAIN = DOMAIN + reauth_entry: ConfigEntry | None = None + + @property + def logger(self) -> logging.Logger: + """Return logger.""" + return _LOGGER + + async def async_step_reauth( + self, entry_data: Mapping[str, Any] + ) -> ConfigFlowResult: + """Handle configuration by re-auth.""" + self.reauth_entry = self.hass.config_entries.async_get_entry( + self.context["entry_id"] + ) + return await self.async_step_user() + + def _async_get_user_id_from_access_token(self, encoded: str) 
-> str: + """Get user ID from access token.""" + decoded = jwt.decode( + encoded, + "", + verify=False, + options={"verify_signature": False}, + algorithms=["HS256"], + ) + return decoded["userId"] + + async def async_oauth_create_entry(self, data: dict) -> ConfigFlowResult: + """Create an entry for the flow.""" + user_id = self._async_get_user_id_from_access_token( + data["token"]["access_token"] + ) + if entry := self.reauth_entry: + if entry.unique_id != user_id: + return self.async_abort(reason="reauth_invalid_user") + return self.async_update_reload_and_abort(entry, data=data) + await self.async_set_unique_id(user_id) + self._abort_if_unique_id_configured() + return await super().async_oauth_create_entry(data) diff --git a/homeassistant/components/yale/const.py b/homeassistant/components/yale/const.py new file mode 100644 index 00000000000..3da4fb1dfb4 --- /dev/null +++ b/homeassistant/components/yale/const.py @@ -0,0 +1,43 @@ +"""Constants for Yale devices.""" + +from homeassistant.const import Platform + +DEFAULT_TIMEOUT = 25 + +CONF_ACCESS_TOKEN_CACHE_FILE = "access_token_cache_file" +CONF_BRAND = "brand" +CONF_LOGIN_METHOD = "login_method" +CONF_INSTALL_ID = "install_id" + +VERIFICATION_CODE_KEY = "verification_code" + +MANUFACTURER = "Yale Home Inc." + +DEFAULT_NAME = "Yale" +DOMAIN = "yale" + +OPERATION_METHOD_AUTORELOCK = "autorelock" +OPERATION_METHOD_REMOTE = "remote" +OPERATION_METHOD_KEYPAD = "keypad" +OPERATION_METHOD_MANUAL = "manual" +OPERATION_METHOD_TAG = "tag" +OPERATION_METHOD_MOBILE_DEVICE = "mobile" + +ATTR_OPERATION_AUTORELOCK = "autorelock" +ATTR_OPERATION_METHOD = "method" +ATTR_OPERATION_REMOTE = "remote" +ATTR_OPERATION_KEYPAD = "keypad" +ATTR_OPERATION_MANUAL = "manual" +ATTR_OPERATION_TAG = "tag" + +LOGIN_METHODS = ["phone", "email"] +DEFAULT_LOGIN_METHOD = "email" + +PLATFORMS = [ + Platform.BINARY_SENSOR, + Platform.BUTTON, + Platform.CAMERA, + Platform.EVENT, + Platform.LOCK, + Platform.SENSOR, +] diff --git a/homeassistant/components/yale/data.py b/homeassistant/components/yale/data.py new file mode 100644 index 00000000000..12736f7733d --- /dev/null +++ b/homeassistant/components/yale/data.py @@ -0,0 +1,52 @@ +"""Support for Yale devices.""" + +from __future__ import annotations + +from yalexs.lock import LockDetail +from yalexs.manager.data import YaleXSData +from yalexs_ble import YaleXSBLEDiscovery + +from homeassistant.config_entries import SOURCE_INTEGRATION_DISCOVERY +from homeassistant.core import HomeAssistant, callback +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import discovery_flow + +from .gateway import YaleGateway + +YALEXS_BLE_DOMAIN = "yalexs_ble" + + +@callback +def _async_trigger_ble_lock_discovery( + hass: HomeAssistant, locks_with_offline_keys: list[LockDetail] +) -> None: + """Update keys for the yalexs-ble integration if available.""" + for lock_detail in locks_with_offline_keys: + discovery_flow.async_create_flow( + hass, + YALEXS_BLE_DOMAIN, + context={"source": SOURCE_INTEGRATION_DISCOVERY}, + data=YaleXSBLEDiscovery( + { + "name": lock_detail.device_name, + "address": lock_detail.mac_address, + "serial": lock_detail.serial_number, + "key": lock_detail.offline_key, + "slot": lock_detail.offline_slot, + } + ), + ) + + +class YaleData(YaleXSData): + """yale data object.""" + + def __init__(self, hass: HomeAssistant, yale_gateway: YaleGateway) -> None: + """Init yale data object.""" + self._hass = hass + super().__init__(yale_gateway, HomeAssistantError) + + @callback + def 
async_offline_key_discovered(self, detail: LockDetail) -> None: + """Handle offline key discovery.""" + _async_trigger_ble_lock_discovery(self._hass, [detail]) diff --git a/homeassistant/components/yale/diagnostics.py b/homeassistant/components/yale/diagnostics.py new file mode 100644 index 00000000000..7e7f6179e7a --- /dev/null +++ b/homeassistant/components/yale/diagnostics.py @@ -0,0 +1,50 @@ +"""Diagnostics support for yale.""" + +from __future__ import annotations + +from typing import Any + +from yalexs.const import Brand + +from homeassistant.components.diagnostics import async_redact_data +from homeassistant.core import HomeAssistant + +from . import YaleConfigEntry + +TO_REDACT = { + "HouseID", + "OfflineKeys", + "installUserID", + "invitations", + "key", + "pins", + "pubsubChannel", + "recentImage", + "remoteOperateSecret", + "users", + "zWaveDSK", + "contentToken", +} + + +async def async_get_config_entry_diagnostics( + hass: HomeAssistant, entry: YaleConfigEntry +) -> dict[str, Any]: + """Return diagnostics for a config entry.""" + data = entry.runtime_data + + return { + "locks": { + lock.device_id: async_redact_data( + data.get_device_detail(lock.device_id).raw, TO_REDACT + ) + for lock in data.locks + }, + "doorbells": { + doorbell.device_id: async_redact_data( + data.get_device_detail(doorbell.device_id).raw, TO_REDACT + ) + for doorbell in data.doorbells + }, + "brand": Brand.YALE_GLOBAL.value, + } diff --git a/homeassistant/components/yale/entity.py b/homeassistant/components/yale/entity.py new file mode 100644 index 00000000000..152070c0be3 --- /dev/null +++ b/homeassistant/components/yale/entity.py @@ -0,0 +1,115 @@ +"""Base class for Yale entity.""" + +from abc import abstractmethod + +from yalexs.activity import Activity, ActivityType +from yalexs.doorbell import Doorbell, DoorbellDetail +from yalexs.keypad import KeypadDetail +from yalexs.lock import Lock, LockDetail +from yalexs.util import get_configuration_url + +from homeassistant.const import ATTR_CONNECTIONS +from homeassistant.core import callback +from homeassistant.helpers import device_registry as dr +from homeassistant.helpers.device_registry import DeviceInfo +from homeassistant.helpers.entity import Entity, EntityDescription + +from . 
import DOMAIN, YaleData
+from .const import MANUFACTURER
+
+DEVICE_TYPES = ["keypad", "lock", "camera", "doorbell", "door", "bell"]
+
+
+class YaleEntity(Entity):
+    """Base implementation for Yale device."""
+
+    _attr_should_poll = False
+    _attr_has_entity_name = True
+
+    def __init__(
+        self, data: YaleData, device: Doorbell | Lock | KeypadDetail, unique_id: str
+    ) -> None:
+        """Initialize a Yale device."""
+        super().__init__()
+        self._data = data
+        self._stream = data.activity_stream
+        self._device = device
+        detail = self._detail
+        self._device_id = device.device_id
+        self._attr_unique_id = f"{device.device_id}_{unique_id}"
+        self._attr_device_info = DeviceInfo(
+            identifiers={(DOMAIN, self._device_id)},
+            manufacturer=MANUFACTURER,
+            model=detail.model,
+            name=device.device_name,
+            sw_version=detail.firmware_version,
+            suggested_area=_remove_device_types(device.device_name, DEVICE_TYPES),
+            configuration_url=get_configuration_url(data.brand),
+        )
+        if isinstance(detail, LockDetail) and (mac := detail.mac_address):
+            self._attr_device_info[ATTR_CONNECTIONS] = {(dr.CONNECTION_BLUETOOTH, mac)}
+
+    @property
+    def _detail(self) -> DoorbellDetail | LockDetail:
+        return self._data.get_device_detail(self._device.device_id)
+
+    @property
+    def _hyper_bridge(self) -> bool:
+        """Check if the lock has a paired hyper bridge."""
+        return bool(self._detail.bridge and self._detail.bridge.hyper_bridge)
+
+    @callback
+    def _get_latest(self, activity_types: set[ActivityType]) -> Activity | None:
+        """Get the latest activity for the device."""
+        return self._stream.get_latest_device_activity(self._device_id, activity_types)
+
+    @callback
+    def _update_from_data_and_write_state(self) -> None:
+        self._update_from_data()
+        self.async_write_ha_state()
+
+    @abstractmethod
+    def _update_from_data(self) -> None:
+        """Update the entity state from the data object."""
+
+    async def async_added_to_hass(self) -> None:
+        """Subscribe to updates."""
+        self.async_on_remove(
+            self._data.async_subscribe_device_id(
+                self._device_id, self._update_from_data_and_write_state
+            )
+        )
+        self.async_on_remove(
+            self._stream.async_subscribe_device_id(
+                self._device_id, self._update_from_data_and_write_state
+            )
+        )
+        self._update_from_data()
+
+
+class YaleDescriptionEntity(YaleEntity):
+    """A Yale entity with a description."""
+
+    def __init__(
+        self,
+        data: YaleData,
+        device: Doorbell | Lock | KeypadDetail,
+        description: EntityDescription,
+    ) -> None:
+        """Initialize a Yale entity with a description."""
+        super().__init__(data, device, description.key)
+        self.entity_description = description
+
+
+def _remove_device_types(name: str, device_types: list[str]) -> str:
+    """Strip device types from a string.
+
+    Yale stores the name as Master Bed Lock
+    or Master Bed Door. We can come up with a
+    reasonable suggestion by removing the supported
+    device types from the string.
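+
+    For example, "Master Bed Lock" yields the
+    suggested area "Master Bed".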
+ """ + lower_name = name.lower() + for device_type in device_types: + lower_name = lower_name.removesuffix(f" {device_type}") + return name[: len(lower_name)] diff --git a/homeassistant/components/yale/event.py b/homeassistant/components/yale/event.py new file mode 100644 index 00000000000..935ba7376f8 --- /dev/null +++ b/homeassistant/components/yale/event.py @@ -0,0 +1,98 @@ +"""Support for yale events.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import TYPE_CHECKING + +from yalexs.activity import Activity +from yalexs.doorbell import DoorbellDetail +from yalexs.lock import LockDetail + +from homeassistant.components.event import ( + EventDeviceClass, + EventEntity, + EventEntityDescription, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . import YaleConfigEntry, YaleData +from .entity import YaleDescriptionEntity +from .util import ( + retrieve_ding_activity, + retrieve_doorbell_motion_activity, + retrieve_online_state, +) + + +@dataclass(kw_only=True, frozen=True) +class YaleEventEntityDescription(EventEntityDescription): + """Describe yale event entities.""" + + value_fn: Callable[[YaleData, DoorbellDetail | LockDetail], Activity | None] + + +TYPES_VIDEO_DOORBELL: tuple[YaleEventEntityDescription, ...] = ( + YaleEventEntityDescription( + key="motion", + translation_key="motion", + device_class=EventDeviceClass.MOTION, + event_types=["motion"], + value_fn=retrieve_doorbell_motion_activity, + ), +) + + +TYPES_DOORBELL: tuple[YaleEventEntityDescription, ...] = ( + YaleEventEntityDescription( + key="doorbell", + translation_key="doorbell", + device_class=EventDeviceClass.DOORBELL, + event_types=["ring"], + value_fn=retrieve_ding_activity, + ), +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the yale event platform.""" + data = config_entry.runtime_data + entities: list[YaleEventEntity] = [ + YaleEventEntity(data, lock, description) + for description in TYPES_DOORBELL + for lock in data.locks + if (detail := data.get_device_detail(lock.device_id)) and detail.doorbell + ] + entities.extend( + YaleEventEntity(data, doorbell, description) + for description in TYPES_DOORBELL + TYPES_VIDEO_DOORBELL + for doorbell in data.doorbells + ) + async_add_entities(entities) + + +class YaleEventEntity(YaleDescriptionEntity, EventEntity): + """An yale event entity.""" + + entity_description: YaleEventEntityDescription + _last_activity: Activity | None = None + + @callback + def _update_from_data(self) -> None: + """Update from data.""" + self._attr_available = retrieve_online_state(self._data, self._detail) + current_activity = self.entity_description.value_fn(self._data, self._detail) + if not current_activity or current_activity == self._last_activity: + return + self._last_activity = current_activity + event_types = self.entity_description.event_types + if TYPE_CHECKING: + assert event_types is not None + self._trigger_event(event_type=event_types[0]) + self.async_write_ha_state() diff --git a/homeassistant/components/yale/gateway.py b/homeassistant/components/yale/gateway.py new file mode 100644 index 00000000000..cd7796182d2 --- /dev/null +++ b/homeassistant/components/yale/gateway.py @@ -0,0 +1,43 @@ +"""Handle Yale connection setup and authentication.""" + +import logging +from pathlib import Path + +from aiohttp 
import ClientSession +from yalexs.authenticator_common import Authentication, AuthenticationState +from yalexs.manager.gateway import Gateway + +from homeassistant.helpers import config_entry_oauth2_flow + +_LOGGER = logging.getLogger(__name__) + + +class YaleGateway(Gateway): + """Handle the connection to Yale.""" + + def __init__( + self, + config_path: Path, + aiohttp_session: ClientSession, + oauth_session: config_entry_oauth2_flow.OAuth2Session, + ) -> None: + """Init the connection.""" + super().__init__(config_path, aiohttp_session) + self._oauth_session = oauth_session + + async def async_get_access_token(self) -> str: + """Get access token.""" + await self._oauth_session.async_ensure_token_valid() + return self._oauth_session.token["access_token"] + + async def async_refresh_access_token_if_needed(self) -> None: + """Refresh the access token if needed.""" + await self._oauth_session.async_ensure_token_valid() + + async def async_authenticate(self) -> Authentication: + """Authenticate with the details provided to setup.""" + await self._oauth_session.async_ensure_token_valid() + self.authentication = Authentication( + AuthenticationState.AUTHENTICATED, None, None, None + ) + return self.authentication diff --git a/homeassistant/components/yale/icons.json b/homeassistant/components/yale/icons.json new file mode 100644 index 00000000000..b654b6d912a --- /dev/null +++ b/homeassistant/components/yale/icons.json @@ -0,0 +1,9 @@ +{ + "entity": { + "binary_sensor": { + "image_capture": { + "default": "mdi:file-image" + } + } + } +} diff --git a/homeassistant/components/yale/lock.py b/homeassistant/components/yale/lock.py new file mode 100644 index 00000000000..b911c92ba0f --- /dev/null +++ b/homeassistant/components/yale/lock.py @@ -0,0 +1,147 @@ +"""Support for Yale lock.""" + +from __future__ import annotations + +from collections.abc import Callable, Coroutine +import logging +from typing import Any + +from aiohttp import ClientResponseError +from yalexs.activity import ActivityType, ActivityTypes +from yalexs.lock import Lock, LockStatus +from yalexs.util import get_latest_activity, update_lock_detail_from_activity + +from homeassistant.components.lock import ATTR_CHANGED_BY, LockEntity, LockEntityFeature +from homeassistant.const import ATTR_BATTERY_LEVEL +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.restore_state import RestoreEntity +import homeassistant.util.dt as dt_util + +from . 
import YaleConfigEntry, YaleData +from .entity import YaleEntity + +_LOGGER = logging.getLogger(__name__) + +LOCK_JAMMED_ERR = 531 + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up Yale locks.""" + data = config_entry.runtime_data + async_add_entities(YaleLock(data, lock) for lock in data.locks) + + +class YaleLock(YaleEntity, RestoreEntity, LockEntity): + """Representation of an Yale lock.""" + + _attr_name = None + _lock_status: LockStatus | None = None + + def __init__(self, data: YaleData, device: Lock) -> None: + """Initialize the lock.""" + super().__init__(data, device, "lock") + if self._detail.unlatch_supported: + self._attr_supported_features = LockEntityFeature.OPEN + + async def async_lock(self, **kwargs: Any) -> None: + """Lock the device.""" + if self._data.push_updates_connected: + await self._data.async_lock_async(self._device_id, self._hyper_bridge) + return + await self._call_lock_operation(self._data.async_lock) + + async def async_open(self, **kwargs: Any) -> None: + """Open/unlatch the device.""" + if self._data.push_updates_connected: + await self._data.async_unlatch_async(self._device_id, self._hyper_bridge) + return + await self._call_lock_operation(self._data.async_unlatch) + + async def async_unlock(self, **kwargs: Any) -> None: + """Unlock the device.""" + if self._data.push_updates_connected: + await self._data.async_unlock_async(self._device_id, self._hyper_bridge) + return + await self._call_lock_operation(self._data.async_unlock) + + async def _call_lock_operation( + self, lock_operation: Callable[[str], Coroutine[Any, Any, list[ActivityTypes]]] + ) -> None: + try: + activities = await lock_operation(self._device_id) + except ClientResponseError as err: + if err.status == LOCK_JAMMED_ERR: + self._detail.lock_status = LockStatus.JAMMED + self._detail.lock_status_datetime = dt_util.utcnow() + else: + raise + else: + for lock_activity in activities: + update_lock_detail_from_activity(self._detail, lock_activity) + + if self._update_lock_status_from_detail(): + _LOGGER.debug( + "async_signal_device_id_update (from lock operation): %s", + self._device_id, + ) + self._data.async_signal_device_id_update(self._device_id) + + def _update_lock_status_from_detail(self) -> bool: + self._attr_available = self._detail.bridge_is_online + + if self._lock_status != self._detail.lock_status: + self._lock_status = self._detail.lock_status + return True + return False + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor and update activity.""" + detail = self._detail + if lock_activity := self._get_latest({ActivityType.LOCK_OPERATION}): + self._attr_changed_by = lock_activity.operated_by + lock_activity_without_operator = self._get_latest( + {ActivityType.LOCK_OPERATION_WITHOUT_OPERATOR} + ) + if latest_activity := get_latest_activity( + lock_activity_without_operator, lock_activity + ): + if latest_activity.was_pushed: + self._detail.set_online(True) + update_lock_detail_from_activity(detail, latest_activity) + + if bridge_activity := self._get_latest({ActivityType.BRIDGE_OPERATION}): + update_lock_detail_from_activity(detail, bridge_activity) + + self._update_lock_status_from_detail() + lock_status = self._lock_status + if lock_status is None or lock_status is LockStatus.UNKNOWN: + self._attr_is_locked = None + else: + self._attr_is_locked = lock_status is LockStatus.LOCKED + self._attr_is_jammed = lock_status is LockStatus.JAMMED + 
self._attr_is_locking = lock_status is LockStatus.LOCKING + self._attr_is_unlocking = lock_status in ( + LockStatus.UNLOCKING, + LockStatus.UNLATCHING, + ) + self._attr_extra_state_attributes = {ATTR_BATTERY_LEVEL: detail.battery_level} + if keypad := detail.keypad: + self._attr_extra_state_attributes["keypad_battery_level"] = ( + keypad.battery_level + ) + + async def async_added_to_hass(self) -> None: + """Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log.""" + await super().async_added_to_hass() + + if not (last_state := await self.async_get_last_state()): + return + + if ATTR_CHANGED_BY in last_state.attributes: + self._attr_changed_by = last_state.attributes[ATTR_CHANGED_BY] diff --git a/homeassistant/components/yale/manifest.json b/homeassistant/components/yale/manifest.json new file mode 100644 index 00000000000..fc93d259891 --- /dev/null +++ b/homeassistant/components/yale/manifest.json @@ -0,0 +1,17 @@ +{ + "domain": "yale", + "name": "Yale", + "codeowners": ["@bdraco"], + "config_flow": true, + "dependencies": ["application_credentials", "cloud"], + "dhcp": [ + { + "hostname": "yale-connect-plus", + "macaddress": "00177A*" + } + ], + "documentation": "https://www.home-assistant.io/integrations/yale", + "iot_class": "cloud_push", + "loggers": ["socketio", "engineio", "yalexs"], + "requirements": ["yalexs==8.6.3", "yalexs-ble==2.4.3"] +} diff --git a/homeassistant/components/yale/sensor.py b/homeassistant/components/yale/sensor.py new file mode 100644 index 00000000000..bb3d4317277 --- /dev/null +++ b/homeassistant/components/yale/sensor.py @@ -0,0 +1,211 @@ +"""Support for Yale sensors.""" + +from __future__ import annotations + +from collections.abc import Callable +from dataclasses import dataclass +from typing import Any, cast + +from yalexs.activity import ActivityType, LockOperationActivity +from yalexs.doorbell import Doorbell +from yalexs.keypad import KeypadDetail +from yalexs.lock import LockDetail + +from homeassistant.components.sensor import ( + RestoreSensor, + SensorDeviceClass, + SensorEntity, + SensorEntityDescription, + SensorStateClass, +) +from homeassistant.const import ( + ATTR_ENTITY_PICTURE, + PERCENTAGE, + STATE_UNAVAILABLE, + EntityCategory, +) +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers.entity_platform import AddEntitiesCallback + +from . 
import YaleConfigEntry +from .const import ( + ATTR_OPERATION_AUTORELOCK, + ATTR_OPERATION_KEYPAD, + ATTR_OPERATION_MANUAL, + ATTR_OPERATION_METHOD, + ATTR_OPERATION_REMOTE, + ATTR_OPERATION_TAG, + OPERATION_METHOD_AUTORELOCK, + OPERATION_METHOD_KEYPAD, + OPERATION_METHOD_MANUAL, + OPERATION_METHOD_MOBILE_DEVICE, + OPERATION_METHOD_REMOTE, + OPERATION_METHOD_TAG, +) +from .entity import YaleDescriptionEntity, YaleEntity + + +def _retrieve_device_battery_state(detail: LockDetail) -> int: + """Get the latest state of the sensor.""" + return detail.battery_level + + +def _retrieve_linked_keypad_battery_state(detail: KeypadDetail) -> int | None: + """Get the latest state of the sensor.""" + return detail.battery_percentage + + +@dataclass(frozen=True, kw_only=True) +class YaleSensorEntityDescription[T: LockDetail | KeypadDetail]( + SensorEntityDescription +): + """Mixin for required keys.""" + + value_fn: Callable[[T], int | None] + + +SENSOR_TYPE_DEVICE_BATTERY = YaleSensorEntityDescription[LockDetail]( + key="device_battery", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + value_fn=_retrieve_device_battery_state, +) + +SENSOR_TYPE_KEYPAD_BATTERY = YaleSensorEntityDescription[KeypadDetail]( + key="linked_keypad_battery", + entity_category=EntityCategory.DIAGNOSTIC, + state_class=SensorStateClass.MEASUREMENT, + value_fn=_retrieve_linked_keypad_battery_state, +) + + +async def async_setup_entry( + hass: HomeAssistant, + config_entry: YaleConfigEntry, + async_add_entities: AddEntitiesCallback, +) -> None: + """Set up the Yale sensors.""" + data = config_entry.runtime_data + entities: list[SensorEntity] = [] + + for device in data.locks: + detail = data.get_device_detail(device.device_id) + entities.append(YaleOperatorSensor(data, device, "lock_operator")) + if SENSOR_TYPE_DEVICE_BATTERY.value_fn(detail): + entities.append( + YaleBatterySensor[LockDetail](data, device, SENSOR_TYPE_DEVICE_BATTERY) + ) + if keypad := detail.keypad: + entities.append( + YaleBatterySensor[KeypadDetail]( + data, keypad, SENSOR_TYPE_KEYPAD_BATTERY + ) + ) + + entities.extend( + YaleBatterySensor[Doorbell](data, device, SENSOR_TYPE_DEVICE_BATTERY) + for device in data.doorbells + if SENSOR_TYPE_DEVICE_BATTERY.value_fn(data.get_device_detail(device.device_id)) + ) + + async_add_entities(entities) + + +class YaleOperatorSensor(YaleEntity, RestoreSensor): + """Representation of an Yale lock operation sensor.""" + + _attr_translation_key = "operator" + _operated_remote: bool | None = None + _operated_keypad: bool | None = None + _operated_manual: bool | None = None + _operated_tag: bool | None = None + _operated_autorelock: bool | None = None + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor and update activity.""" + self._attr_available = True + if lock_activity := self._get_latest({ActivityType.LOCK_OPERATION}): + lock_activity = cast(LockOperationActivity, lock_activity) + self._attr_native_value = lock_activity.operated_by + self._operated_remote = lock_activity.operated_remote + self._operated_keypad = lock_activity.operated_keypad + self._operated_manual = lock_activity.operated_manual + self._operated_tag = lock_activity.operated_tag + self._operated_autorelock = lock_activity.operated_autorelock + self._attr_entity_picture = lock_activity.operator_thumbnail_url + + @property + def extra_state_attributes(self) -> dict[str, Any]: + """Return the device specific state attributes.""" + attributes: dict[str, Any] = {} + + if 
self._operated_remote is not None: + attributes[ATTR_OPERATION_REMOTE] = self._operated_remote + if self._operated_keypad is not None: + attributes[ATTR_OPERATION_KEYPAD] = self._operated_keypad + if self._operated_manual is not None: + attributes[ATTR_OPERATION_MANUAL] = self._operated_manual + if self._operated_tag is not None: + attributes[ATTR_OPERATION_TAG] = self._operated_tag + if self._operated_autorelock is not None: + attributes[ATTR_OPERATION_AUTORELOCK] = self._operated_autorelock + + if self._operated_remote: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_REMOTE + elif self._operated_keypad: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_KEYPAD + elif self._operated_manual: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MANUAL + elif self._operated_tag: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_TAG + elif self._operated_autorelock: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_AUTORELOCK + else: + attributes[ATTR_OPERATION_METHOD] = OPERATION_METHOD_MOBILE_DEVICE + + return attributes + + async def async_added_to_hass(self) -> None: + """Restore ATTR_CHANGED_BY on startup since it is likely no longer in the activity log.""" + await super().async_added_to_hass() + + last_state = await self.async_get_last_state() + last_sensor_state = await self.async_get_last_sensor_data() + if ( + not last_state + or not last_sensor_state + or last_state.state == STATE_UNAVAILABLE + ): + return + + self._attr_native_value = last_sensor_state.native_value + last_attrs = last_state.attributes + if ATTR_ENTITY_PICTURE in last_attrs: + self._attr_entity_picture = last_attrs[ATTR_ENTITY_PICTURE] + if ATTR_OPERATION_REMOTE in last_attrs: + self._operated_remote = last_attrs[ATTR_OPERATION_REMOTE] + if ATTR_OPERATION_KEYPAD in last_attrs: + self._operated_keypad = last_attrs[ATTR_OPERATION_KEYPAD] + if ATTR_OPERATION_MANUAL in last_attrs: + self._operated_manual = last_attrs[ATTR_OPERATION_MANUAL] + if ATTR_OPERATION_TAG in last_attrs: + self._operated_tag = last_attrs[ATTR_OPERATION_TAG] + if ATTR_OPERATION_AUTORELOCK in last_attrs: + self._operated_autorelock = last_attrs[ATTR_OPERATION_AUTORELOCK] + + +class YaleBatterySensor[T: LockDetail | KeypadDetail]( + YaleDescriptionEntity, SensorEntity +): + """Representation of an Yale sensor.""" + + entity_description: YaleSensorEntityDescription[T] + _attr_device_class = SensorDeviceClass.BATTERY + _attr_native_unit_of_measurement = PERCENTAGE + + @callback + def _update_from_data(self) -> None: + """Get the latest state of the sensor.""" + self._attr_native_value = self.entity_description.value_fn(self._detail) + self._attr_available = self._attr_native_value is not None diff --git a/homeassistant/components/yale/strings.json b/homeassistant/components/yale/strings.json new file mode 100644 index 00000000000..3fb1345a3b0 --- /dev/null +++ b/homeassistant/components/yale/strings.json @@ -0,0 +1,71 @@ +{ + "config": { + "step": { + "pick_implementation": { + "title": "[%key:common::config_flow::title::oauth2_pick_implementation%]" + } + }, + "abort": { + "already_configured": "[%key:common::config_flow::abort::already_configured_account%]", + "already_in_progress": "[%key:common::config_flow::abort::already_in_progress%]", + "oauth_error": "[%key:common::config_flow::abort::oauth2_error%]", + "missing_configuration": "[%key:common::config_flow::abort::oauth2_missing_configuration%]", + "authorize_url_timeout": "[%key:common::config_flow::abort::oauth2_authorize_url_timeout%]", + "no_url_available": 
"[%key:common::config_flow::abort::oauth2_no_url_available%]", + "user_rejected_authorize": "[%key:common::config_flow::abort::oauth2_user_rejected_authorize%]", + "reauth_successful": "[%key:common::config_flow::abort::reauth_successful%]", + "oauth_timeout": "[%key:common::config_flow::abort::oauth2_timeout%]", + "oauth_unauthorized": "[%key:common::config_flow::abort::oauth2_unauthorized%]", + "oauth_failed": "[%key:common::config_flow::abort::oauth2_failed%]", + "reauth_invalid_user": "Reauthenticate must use the same account." + }, + "create_entry": { + "default": "[%key:common::config_flow::create_entry::authenticated%]" + } + }, + "entity": { + "binary_sensor": { + "ding": { + "name": "Doorbell ding" + }, + "image_capture": { + "name": "Image capture" + } + }, + "button": { + "wake": { + "name": "Wake" + } + }, + "camera": { + "camera": { + "name": "[%key:component::camera::title%]" + } + }, + "sensor": { + "operator": { + "name": "Operator" + } + }, + "event": { + "doorbell": { + "state_attributes": { + "event_type": { + "state": { + "ring": "Ring" + } + } + } + }, + "motion": { + "state_attributes": { + "event_type": { + "state": { + "motion": "Motion" + } + } + } + } + } + } +} diff --git a/homeassistant/components/yale/util.py b/homeassistant/components/yale/util.py new file mode 100644 index 00000000000..3462c576fd9 --- /dev/null +++ b/homeassistant/components/yale/util.py @@ -0,0 +1,78 @@ +"""Yale util functions.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from functools import partial + +import aiohttp +from yalexs.activity import ACTION_DOORBELL_CALL_MISSED, Activity, ActivityType +from yalexs.doorbell import DoorbellDetail +from yalexs.lock import LockDetail +from yalexs.manager.const import ACTIVITY_UPDATE_INTERVAL + +from homeassistant.core import HomeAssistant, callback +from homeassistant.helpers import aiohttp_client + +from . 
import YaleData
+
+TIME_TO_DECLARE_DETECTION = timedelta(seconds=ACTIVITY_UPDATE_INTERVAL.total_seconds())
+
+
+@callback
+def async_create_yale_clientsession(hass: HomeAssistant) -> aiohttp.ClientSession:
+    """Create an aiohttp session for the Yale integration."""
+    # Create an aiohttp session instead of using the default one since the
+    # default one is likely to trigger Yale's WAF if another integration
+    # is also using Cloudflare
+    return aiohttp_client.async_create_clientsession(hass)
+
+
+def retrieve_time_based_activity(
+    activities: set[ActivityType], data: YaleData, detail: DoorbellDetail | LockDetail
+) -> Activity | None:
+    """Get the latest state of the sensor."""
+    stream = data.activity_stream
+    if latest := stream.get_latest_device_activity(detail.device_id, activities):
+        return _activity_time_based(latest)
+    return None
+
+
+_RING_ACTIVITIES = {ActivityType.DOORBELL_DING}
+
+
+def retrieve_ding_activity(
+    data: YaleData, detail: DoorbellDetail | LockDetail
+) -> Activity | None:
+    """Get the ring/ding state."""
+    stream = data.activity_stream
+    latest = stream.get_latest_device_activity(detail.device_id, _RING_ACTIVITIES)
+    if latest is None or (
+        data.push_updates_connected and latest.action == ACTION_DOORBELL_CALL_MISSED
+    ):
+        return None
+    return _activity_time_based(latest)
+
+
+retrieve_doorbell_motion_activity = partial(
+    retrieve_time_based_activity, {ActivityType.DOORBELL_MOTION}
+)
+
+
+def _activity_time_based(latest: Activity) -> Activity | None:
+    """Get the latest state of the sensor."""
+    start = latest.activity_start_time
+    end = latest.activity_end_time + TIME_TO_DECLARE_DETECTION
+    if start <= datetime.now() <= end:
+        return latest
+    return None
+
+
+def retrieve_online_state(data: YaleData, detail: DoorbellDetail | LockDetail) -> bool:
+    """Get the latest state of the sensor."""
+    # The doorbell will go into standby mode when there is no motion
+    # for a short while.
It will wake by itself when needed so we need
+    # to consider it available or we will not report motion or dings
+    if isinstance(detail, DoorbellDetail):
+        return detail.is_online or detail.is_standby
+    return detail.bridge_is_online
diff --git a/homeassistant/components/yale_home/manifest.json b/homeassistant/components/yale_home/manifest.json
index 0e45b0da7d0..c497fa3fe34 100644
--- a/homeassistant/components/yale_home/manifest.json
+++ b/homeassistant/components/yale_home/manifest.json
@@ -2,5 +2,5 @@ {
   "domain": "yale_home",
   "name": "Yale Home",
   "integration_type": "virtual",
-  "supported_by": "august"
+  "supported_by": "yale"
 }
diff --git a/homeassistant/components/yale_smart_alarm/__init__.py b/homeassistant/components/yale_smart_alarm/__init__.py
index 1ef68d98a13..3c853afb6fd 100644
--- a/homeassistant/components/yale_smart_alarm/__init__.py
+++ b/homeassistant/components/yale_smart_alarm/__init__.py
@@ -6,7 +6,6 @@ from homeassistant.components.lock import CONF_DEFAULT_CODE, DOMAIN as LOCK_DOMA
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.const import CONF_CODE
 from homeassistant.core import HomeAssistant
-from homeassistant.exceptions import ConfigEntryAuthFailed
 from homeassistant.helpers import entity_registry as er

 from .const import LOGGER, PLATFORMS
@@ -19,9 +18,6 @@ async def async_setup_entry(hass: HomeAssistant, entry: YaleConfigEntry) -> bool
     """Set up Yale from a config entry."""
     coordinator = YaleDataUpdateCoordinator(hass, entry)

-    if not await hass.async_add_executor_job(coordinator.get_updates):
-        raise ConfigEntryAuthFailed
-
     await coordinator.async_config_entry_first_refresh()

     entry.runtime_data = coordinator
diff --git a/homeassistant/components/yale_smart_alarm/coordinator.py b/homeassistant/components/yale_smart_alarm/coordinator.py
index 5307e166e17..1067b9279a4 100644
--- a/homeassistant/components/yale_smart_alarm/coordinator.py
+++ b/homeassistant/components/yale_smart_alarm/coordinator.py
@@ -20,10 +20,11 @@ from .const import DEFAULT_SCAN_INTERVAL, DOMAIN, LOGGER, YALE_BASE_ERRORS
 class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
     """A Yale Data Update Coordinator."""

+    yale: YaleSmartAlarmClient
+
     def __init__(self, hass: HomeAssistant, entry: ConfigEntry) -> None:
         """Initialize the Yale hub."""
         self.entry = entry
-        self.yale: YaleSmartAlarmClient | None = None
         super().__init__(
             hass,
             LOGGER,
@@ -32,6 +33,17 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
             always_update=False,
         )

+    async def _async_setup(self) -> None:
+        """Set up connection to Yale."""
+        try:
+            self.yale = YaleSmartAlarmClient(
+                self.entry.data[CONF_USERNAME], self.entry.data[CONF_PASSWORD]
+            )
+        except AuthenticationError as error:
+            raise ConfigEntryAuthFailed from error
+        except YALE_BASE_ERRORS as error:
+            raise UpdateFailed from error
+
     async def _async_update_data(self) -> dict[str, Any]:
         """Fetch data from Yale."""
@@ -132,25 +144,9 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]):
     def get_updates(self) -> dict[str, Any]:
         """Fetch data from Yale."""
-
-        if self.yale is None:
-            try:
-                self.yale = YaleSmartAlarmClient(
-                    self.entry.data[CONF_USERNAME], self.entry.data[CONF_PASSWORD]
-                )
-            except AuthenticationError as error:
-                raise ConfigEntryAuthFailed from error
-            except YALE_BASE_ERRORS as error:
-                raise UpdateFailed from error
-
         try:
             arm_status = self.yale.get_armed_status()
-            data = self.yale.get_all()
-            cycle = data["CYCLE"]
-            status = data["STATUS"]
-            online = data["ONLINE"]
-
panel_info = data["PANEL INFO"] - + data = self.yale.get_information() except AuthenticationError as error: raise ConfigEntryAuthFailed from error except YALE_BASE_ERRORS as error: @@ -158,8 +154,8 @@ class YaleDataUpdateCoordinator(DataUpdateCoordinator[dict[str, Any]]): return { "arm_status": arm_status, - "cycle": cycle, - "status": status, - "online": online, - "panel_info": panel_info, + "cycle": data.cycle, + "status": data.status, + "online": data.online, + "panel_info": data.panel_info, } diff --git a/homeassistant/components/yale_smart_alarm/diagnostics.py b/homeassistant/components/yale_smart_alarm/diagnostics.py index 82d2ca9a915..eb7b2be9fb4 100644 --- a/homeassistant/components/yale_smart_alarm/diagnostics.py +++ b/homeassistant/components/yale_smart_alarm/diagnostics.py @@ -2,6 +2,7 @@ from __future__ import annotations +from dataclasses import asdict from typing import Any from homeassistant.components.diagnostics import async_redact_data @@ -29,4 +30,4 @@ async def async_get_config_entry_diagnostics( assert coordinator.yale get_all_data = await hass.async_add_executor_job(coordinator.yale.get_all) - return async_redact_data(get_all_data, TO_REDACT) + return async_redact_data(asdict(get_all_data), TO_REDACT) diff --git a/homeassistant/components/yale_smart_alarm/lock.py b/homeassistant/components/yale_smart_alarm/lock.py index 3b4d0a19039..386e546afbf 100644 --- a/homeassistant/components/yale_smart_alarm/lock.py +++ b/homeassistant/components/yale_smart_alarm/lock.py @@ -7,7 +7,7 @@ from typing import TYPE_CHECKING, Any from homeassistant.components.lock import LockEntity from homeassistant.const import ATTR_CODE from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers.entity_platform import AddEntitiesCallback from . 
import YaleConfigEntry @@ -61,17 +61,22 @@ class YaleDoorlock(YaleEntity, LockEntity): """Set lock.""" if TYPE_CHECKING: assert self.coordinator.yale, "Connection to API is missing" + if command == "unlocked" and not code: + raise ServiceValidationError( + translation_domain=DOMAIN, + translation_key="no_code", + ) try: get_lock = await self.hass.async_add_executor_job( self.coordinator.yale.lock_api.get, self.lock_name ) - if command == "locked": + if get_lock and command == "locked": lock_state = await self.hass.async_add_executor_job( self.coordinator.yale.lock_api.close_lock, get_lock, ) - if command == "unlocked": + if code and get_lock and command == "unlocked": lock_state = await self.hass.async_add_executor_job( self.coordinator.yale.lock_api.open_lock, get_lock, code ) diff --git a/homeassistant/components/yale_smart_alarm/manifest.json b/homeassistant/components/yale_smart_alarm/manifest.json index ed494505bae..92dd774d1d9 100644 --- a/homeassistant/components/yale_smart_alarm/manifest.json +++ b/homeassistant/components/yale_smart_alarm/manifest.json @@ -6,5 +6,5 @@ "documentation": "https://www.home-assistant.io/integrations/yale_smart_alarm", "iot_class": "cloud_polling", "loggers": ["yalesmartalarmclient"], - "requirements": ["yalesmartalarmclient==0.3.9"] + "requirements": ["yalesmartalarmclient==0.4.0"] } diff --git a/homeassistant/components/yale_smart_alarm/strings.json b/homeassistant/components/yale_smart_alarm/strings.json index ce89c9e69ea..63260c03e7f 100644 --- a/homeassistant/components/yale_smart_alarm/strings.json +++ b/homeassistant/components/yale_smart_alarm/strings.json @@ -67,6 +67,9 @@ "set_lock": { "message": "Could not set lock for {name}: {error}" }, + "no_code": { + "message": "Can not unlock without code" + }, "could_not_change_lock": { "message": "Could not set lock, check system ready for lock" }, diff --git a/homeassistant/components/yamaha/icons.json b/homeassistant/components/yamaha/icons.json index f7075508b0d..40eceda3b3e 100644 --- a/homeassistant/components/yamaha/icons.json +++ b/homeassistant/components/yamaha/icons.json @@ -1,7 +1,13 @@ { "services": { - "enable_output": "mdi:audio-input-stereo-minijack", - "menu_cursor": "mdi:cursor-default", - "select_scene": "mdi:palette" + "enable_output": { + "service": "mdi:audio-input-stereo-minijack" + }, + "menu_cursor": { + "service": "mdi:cursor-default" + }, + "select_scene": { + "service": "mdi:palette" + } } } diff --git a/homeassistant/components/yamaha/media_player.py b/homeassistant/components/yamaha/media_player.py index fd47bcec041..58f501b99be 100644 --- a/homeassistant/components/yamaha/media_player.py +++ b/homeassistant/components/yamaha/media_player.py @@ -254,19 +254,6 @@ class YamahaDeviceZone(MediaPlayerEntity): # the default name of the integration may not be changed # to avoid a breaking change. 
self._attr_unique_id = f"{self.zctrl.serial_number}_{self._zone}" - _LOGGER.debug( - "Receiver zone: %s zone %s uid %s", - self._name, - self._zone, - self._attr_unique_id, - ) - else: - _LOGGER.info( - "Receiver zone: %s zone %s no uid %s", - self._name, - self._zone, - self._attr_unique_id, - ) def update(self) -> None: """Get the latest details from the device.""" diff --git a/homeassistant/components/yardian/icons.json b/homeassistant/components/yardian/icons.json index 79bcc32adf2..4ca3d83bd15 100644 --- a/homeassistant/components/yardian/icons.json +++ b/homeassistant/components/yardian/icons.json @@ -7,6 +7,8 @@ } }, "services": { - "start_irrigation": "mdi:water" + "start_irrigation": { + "service": "mdi:water" + } } } diff --git a/homeassistant/components/yeelight/config_flow.py b/homeassistant/components/yeelight/config_flow.py index d7bf4e25996..1b36fba59df 100644 --- a/homeassistant/components/yeelight/config_flow.py +++ b/homeassistant/components/yeelight/config_flow.py @@ -3,6 +3,7 @@ from __future__ import annotations import logging +from typing import TYPE_CHECKING, Any from urllib.parse import urlparse import voluptuous as vol @@ -57,11 +58,11 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): """Return the options flow.""" return OptionsFlowHandler(config_entry) - def __init__(self): + def __init__(self) -> None: """Initialize the config flow.""" - self._discovered_devices = {} + self._discovered_devices: dict[str, Any] = {} self._discovered_model = None - self._discovered_ip = None + self._discovered_ip: str | None = None async def async_step_homekit( self, discovery_info: zeroconf.ZeroconfServiceInfo @@ -162,7 +163,9 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): step_id="discovery_confirm", description_placeholders=placeholders ) - async def async_step_user(self, user_input=None): + async def async_step_user( + self, user_input: dict[str, Any] | None = None + ) -> ConfigFlowResult: """Handle the initial step.""" errors = {} if user_input is not None: @@ -176,6 +179,8 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): errors["base"] = "cannot_connect" else: self._abort_if_unique_id_configured() + if TYPE_CHECKING: + assert self.unique_id return self.async_create_entry( title=async_format_model_id(model, self.unique_id), data={ @@ -239,21 +244,21 @@ class YeelightConfigFlow(ConfigFlow, domain=DOMAIN): data_schema=vol.Schema({vol.Required(CONF_DEVICE): vol.In(devices_name)}), ) - async def async_step_import(self, user_input=None): + async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult: """Handle import step.""" - host = user_input[CONF_HOST] + host = import_data[CONF_HOST] try: await self._async_try_connect(host, raise_on_progress=False) except CannotConnect: _LOGGER.error("Failed to import %s: cannot connect", host) return self.async_abort(reason="cannot_connect") - if CONF_NIGHTLIGHT_SWITCH_TYPE in user_input: - user_input[CONF_NIGHTLIGHT_SWITCH] = ( - user_input.pop(CONF_NIGHTLIGHT_SWITCH_TYPE) + if CONF_NIGHTLIGHT_SWITCH_TYPE in import_data: + import_data[CONF_NIGHTLIGHT_SWITCH] = ( + import_data.pop(CONF_NIGHTLIGHT_SWITCH_TYPE) == NIGHTLIGHT_SWITCH_TYPE_LIGHT ) self._abort_if_unique_id_configured() - return self.async_create_entry(title=user_input[CONF_NAME], data=user_input) + return self.async_create_entry(title=import_data[CONF_NAME], data=import_data) async def _async_try_connect(self, host, raise_on_progress=True): """Set up with options.""" diff --git a/homeassistant/components/yeelight/icons.json 
b/homeassistant/components/yeelight/icons.json index bf0d0c497f0..898637e752c 100644 --- a/homeassistant/components/yeelight/icons.json +++ b/homeassistant/components/yeelight/icons.json @@ -7,13 +7,29 @@ } }, "services": { - "set_mode": "mdi:cog", - "set_color_scene": "mdi:palette", - "set_hsv_scene": "mdi:palette", - "set_color_temp_scene": "mdi:palette", - "set_color_flow_scene": "mdi:palette", - "set_auto_delay_off_scene": "mdi:timer", - "start_flow": "mdi:play", - "set_music_mode": "mdi:music" + "set_mode": { + "service": "mdi:cog" + }, + "set_color_scene": { + "service": "mdi:palette" + }, + "set_hsv_scene": { + "service": "mdi:palette" + }, + "set_color_temp_scene": { + "service": "mdi:palette" + }, + "set_color_flow_scene": { + "service": "mdi:palette" + }, + "set_auto_delay_off_scene": { + "service": "mdi:timer" + }, + "start_flow": { + "service": "mdi:play" + }, + "set_music_mode": { + "service": "mdi:music" + } } } diff --git a/homeassistant/components/yeelight/scanner.py b/homeassistant/components/yeelight/scanner.py index 6ca12e9bd01..ac482504880 100644 --- a/homeassistant/components/yeelight/scanner.py +++ b/homeassistant/components/yeelight/scanner.py @@ -67,7 +67,8 @@ class YeelightScanner: async def async_setup(self) -> None: """Set up the scanner.""" if self._setup_future is not None: - return await self._setup_future + await self._setup_future + return self._setup_future = self._hass.loop.create_future() connected_futures: list[asyncio.Future[None]] = [] diff --git a/homeassistant/components/yolink/const.py b/homeassistant/components/yolink/const.py index 686160d9248..217dd66d063 100644 --- a/homeassistant/components/yolink/const.py +++ b/homeassistant/components/yolink/const.py @@ -23,3 +23,11 @@ DEV_MODEL_TH_SENSOR_YS8014_UC = "YS8014-UC" DEV_MODEL_TH_SENSOR_YS8014_EC = "YS8014-EC" DEV_MODEL_TH_SENSOR_YS8017_UC = "YS8017-UC" DEV_MODEL_TH_SENSOR_YS8017_EC = "YS8017-EC" +DEV_MODEL_FLEX_FOB_YS3604_UC = "YS3604-UC" +DEV_MODEL_FLEX_FOB_YS3604_EC = "YS3604-EC" +DEV_MODEL_FLEX_FOB_YS3614_UC = "YS3614-UC" +DEV_MODEL_FLEX_FOB_YS3614_EC = "YS3614-EC" +DEV_MODEL_PLUG_YS6602_UC = "YS6602-UC" +DEV_MODEL_PLUG_YS6602_EC = "YS6602-EC" +DEV_MODEL_PLUG_YS6803_UC = "YS6803-UC" +DEV_MODEL_PLUG_YS6803_EC = "YS6803-EC" diff --git a/homeassistant/components/yolink/device_trigger.py b/homeassistant/components/yolink/device_trigger.py index b7f83623be5..6e247bf858e 100644 --- a/homeassistant/components/yolink/device_trigger.py +++ b/homeassistant/components/yolink/device_trigger.py @@ -16,6 +16,12 @@ from homeassistant.helpers.trigger import TriggerActionType, TriggerInfo from homeassistant.helpers.typing import ConfigType from . 
import DOMAIN, YOLINK_EVENT +from .const import ( + DEV_MODEL_FLEX_FOB_YS3604_EC, + DEV_MODEL_FLEX_FOB_YS3604_UC, + DEV_MODEL_FLEX_FOB_YS3614_EC, + DEV_MODEL_FLEX_FOB_YS3614_UC, +) CONF_BUTTON_1 = "button_1" CONF_BUTTON_2 = "button_2" @@ -24,7 +30,7 @@ CONF_BUTTON_4 = "button_4" CONF_SHORT_PRESS = "short_press" CONF_LONG_PRESS = "long_press" -REMOTE_TRIGGER_TYPES = { +FLEX_FOB_4_BUTTONS = { f"{CONF_BUTTON_1}_{CONF_SHORT_PRESS}", f"{CONF_BUTTON_1}_{CONF_LONG_PRESS}", f"{CONF_BUTTON_2}_{CONF_SHORT_PRESS}", @@ -35,14 +41,24 @@ REMOTE_TRIGGER_TYPES = { f"{CONF_BUTTON_4}_{CONF_LONG_PRESS}", } +FLEX_FOB_2_BUTTONS = { + f"{CONF_BUTTON_1}_{CONF_SHORT_PRESS}", + f"{CONF_BUTTON_1}_{CONF_LONG_PRESS}", + f"{CONF_BUTTON_2}_{CONF_SHORT_PRESS}", + f"{CONF_BUTTON_2}_{CONF_LONG_PRESS}", +} + TRIGGER_SCHEMA = DEVICE_TRIGGER_BASE_SCHEMA.extend( - {vol.Required(CONF_TYPE): vol.In(REMOTE_TRIGGER_TYPES)} + {vol.Required(CONF_TYPE): vol.In(FLEX_FOB_4_BUTTONS)} ) -# YoLink Remotes YS3604/YS3605/YS3606/YS3607 -DEVICE_TRIGGER_TYPES: dict[str, set[str]] = { - ATTR_DEVICE_SMART_REMOTER: REMOTE_TRIGGER_TYPES, +# YoLink Remotes YS3604/YS3614 +FLEX_FOB_TRIGGER_TYPES: dict[str, set[str]] = { + DEV_MODEL_FLEX_FOB_YS3604_EC: FLEX_FOB_4_BUTTONS, + DEV_MODEL_FLEX_FOB_YS3604_UC: FLEX_FOB_4_BUTTONS, + DEV_MODEL_FLEX_FOB_YS3614_UC: FLEX_FOB_2_BUTTONS, + DEV_MODEL_FLEX_FOB_YS3614_EC: FLEX_FOB_2_BUTTONS, } @@ -54,7 +70,8 @@ async def async_get_triggers( registry_device = device_registry.async_get(device_id) if not registry_device or registry_device.model != ATTR_DEVICE_SMART_REMOTER: return [] - + if registry_device.model_id not in list(FLEX_FOB_TRIGGER_TYPES.keys()): + return [] return [ { CONF_DEVICE_ID: device_id, @@ -62,7 +79,7 @@ async def async_get_triggers( CONF_PLATFORM: "device", CONF_TYPE: trigger, } - for trigger in DEVICE_TRIGGER_TYPES[ATTR_DEVICE_SMART_REMOTER] + for trigger in FLEX_FOB_TRIGGER_TYPES[registry_device.model_id] ] diff --git a/homeassistant/components/yolink/entity.py b/homeassistant/components/yolink/entity.py index d9ca2968493..0f500b72404 100644 --- a/homeassistant/components/yolink/entity.py +++ b/homeassistant/components/yolink/entity.py @@ -55,6 +55,7 @@ class YoLinkEntity(CoordinatorEntity[YoLinkCoordinator]): identifiers={(DOMAIN, self.coordinator.device.device_id)}, manufacturer=MANUFACTURER, model=self.coordinator.device.device_type, + model_id=self.coordinator.device.device_model_name, name=self.coordinator.device.device_name, ) diff --git a/homeassistant/components/yolink/icons.json b/homeassistant/components/yolink/icons.json index ee9037c864a..c58d219a2e0 100644 --- a/homeassistant/components/yolink/icons.json +++ b/homeassistant/components/yolink/icons.json @@ -17,6 +17,9 @@ }, "power_failure_alarm_beep": { "default": "mdi:bullhorn" + }, + "water_meter_reading": { + "default": "mdi:gauge" } }, "switch": { @@ -26,6 +29,8 @@ } }, "services": { - "play_on_speaker_hub": "mdi:speaker" + "play_on_speaker_hub": { + "service": "mdi:speaker" + } } } diff --git a/homeassistant/components/yolink/lock.py b/homeassistant/components/yolink/lock.py index 177a8808de1..d675fd8cf06 100644 --- a/homeassistant/components/yolink/lock.py +++ b/homeassistant/components/yolink/lock.py @@ -1,11 +1,11 @@ -"""YoLink Lock.""" +"""YoLink Lock V1/V2.""" from __future__ import annotations from typing import Any from yolink.client_request import ClientRequest -from yolink.const import ATTR_DEVICE_LOCK +from yolink.const import ATTR_DEVICE_LOCK, ATTR_DEVICE_LOCK_V2 from homeassistant.components.lock import LockEntity 
from homeassistant.config_entries import ConfigEntry @@ -27,7 +27,8 @@ async def async_setup_entry( entities = [ YoLinkLockEntity(config_entry, device_coordinator) for device_coordinator in device_coordinators.values() - if device_coordinator.device.device_type == ATTR_DEVICE_LOCK + if device_coordinator.device.device_type + in [ATTR_DEVICE_LOCK, ATTR_DEVICE_LOCK_V2] ] async_add_entities(entities) @@ -50,21 +51,41 @@ class YoLinkLockEntity(YoLinkEntity, LockEntity): def update_entity_state(self, state: dict[str, Any]) -> None: """Update HA Entity State.""" state_value = state.get("state") - self._attr_is_locked = ( - state_value == "locked" if state_value is not None else None - ) + if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2: + self._attr_is_locked = ( + state_value["lock"] == "locked" if state_value is not None else None + ) + else: + self._attr_is_locked = ( + state_value == "locked" if state_value is not None else None + ) self.async_write_ha_state() async def call_lock_state_change(self, state: str) -> None: """Call setState api to change lock state.""" - await self.call_device(ClientRequest("setState", {"state": state})) + if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2: + await self.call_device( + ClientRequest("setState", {"state": {"lock": state}}) + ) + else: + await self.call_device(ClientRequest("setState", {"state": state})) self._attr_is_locked = state == "lock" self.async_write_ha_state() async def async_lock(self, **kwargs: Any) -> None: """Lock device.""" - await self.call_lock_state_change("lock") + state_param = ( + "locked" + if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2 + else "lock" + ) + await self.call_lock_state_change(state_param) async def async_unlock(self, **kwargs: Any) -> None: """Unlock device.""" - await self.call_lock_state_change("unlock") + state_param = ( + "unlocked" + if self.coordinator.device.device_type == ATTR_DEVICE_LOCK_V2 + else "unlock" + ) + await self.call_lock_state_change(state_param) diff --git a/homeassistant/components/yolink/sensor.py b/homeassistant/components/yolink/sensor.py index 77bbccb2f6a..b8f2a77516c 100644 --- a/homeassistant/components/yolink/sensor.py +++ b/homeassistant/components/yolink/sensor.py @@ -40,7 +40,9 @@ from homeassistant.const import ( PERCENTAGE, SIGNAL_STRENGTH_DECIBELS_MILLIWATT, EntityCategory, + UnitOfEnergy, UnitOfLength, + UnitOfPower, UnitOfTemperature, UnitOfVolume, ) @@ -49,6 +51,10 @@ from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.util import percentage from .const import ( + DEV_MODEL_PLUG_YS6602_EC, + DEV_MODEL_PLUG_YS6602_UC, + DEV_MODEL_PLUG_YS6803_EC, + DEV_MODEL_PLUG_YS6803_UC, DEV_MODEL_TH_SENSOR_YS8004_EC, DEV_MODEL_TH_SENSOR_YS8004_UC, DEV_MODEL_TH_SENSOR_YS8014_EC, @@ -125,6 +131,13 @@ NONE_HUMIDITY_SENSOR_MODELS = [ DEV_MODEL_TH_SENSOR_YS8017_EC, ] +POWER_SUPPORT_MODELS = [ + DEV_MODEL_PLUG_YS6602_UC, + DEV_MODEL_PLUG_YS6602_EC, + DEV_MODEL_PLUG_YS6803_UC, + DEV_MODEL_PLUG_YS6803_EC, +] + def cvt_battery(val: int | None) -> int | None: """Convert battery to percentage.""" @@ -228,13 +241,32 @@ SENSOR_TYPES: tuple[YoLinkSensorEntityDescription, ...] 
= ( key="meter_reading", translation_key="water_meter_reading", device_class=SensorDeviceClass.WATER, - icon="mdi:gauge", native_unit_of_measurement=UnitOfVolume.CUBIC_METERS, state_class=SensorStateClass.TOTAL_INCREASING, should_update_entity=lambda value: value is not None, exists_fn=lambda device: device.device_type in ATTR_DEVICE_WATER_METER_CONTROLLER, ), + YoLinkSensorEntityDescription( + key="power", + translation_key="current_power", + device_class=SensorDeviceClass.POWER, + native_unit_of_measurement=UnitOfPower.WATT, + state_class=SensorStateClass.MEASUREMENT, + should_update_entity=lambda value: value is not None, + exists_fn=lambda device: device.device_model_name in POWER_SUPPORT_MODELS, + value=lambda value: value / 10 if value is not None else None, + ), + YoLinkSensorEntityDescription( + key="watt", + translation_key="power_consumption", + device_class=SensorDeviceClass.ENERGY, + native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR, + state_class=SensorStateClass.TOTAL, + should_update_entity=lambda value: value is not None, + exists_fn=lambda device: device.device_model_name in POWER_SUPPORT_MODELS, + value=lambda value: value / 100 if value is not None else None, + ), ) diff --git a/homeassistant/components/yolink/strings.json b/homeassistant/components/yolink/strings.json index bc8fb435e76..cefc7737a79 100644 --- a/homeassistant/components/yolink/strings.json +++ b/homeassistant/components/yolink/strings.json @@ -51,6 +51,12 @@ "plug_4": { "name": "Plug 4" } }, "sensor": { + "current_power": { + "name": "Current power" + }, + "power_consumption": { + "name": "Power consumption" + }, "power_failure_alarm": { "name": "Power failure alarm", "state": { diff --git a/homeassistant/components/zeroconf/manifest.json b/homeassistant/components/zeroconf/manifest.json index 0a76af3b9c2..8b332400805 100644 --- a/homeassistant/components/zeroconf/manifest.json +++ b/homeassistant/components/zeroconf/manifest.json @@ -8,5 +8,5 @@ "iot_class": "local_push", "loggers": ["zeroconf"], "quality_scale": "internal", - "requirements": ["zeroconf==0.132.2"] + "requirements": ["zeroconf==0.133.0"] } diff --git a/homeassistant/components/zha/diagnostics.py b/homeassistant/components/zha/diagnostics.py index bc4738d032a..ad73978d24d 100644 --- a/homeassistant/components/zha/diagnostics.py +++ b/homeassistant/components/zha/diagnostics.py @@ -7,7 +7,7 @@ from importlib.metadata import version from typing import Any from zha.application.const import ( - ATTR_ATTRIBUTE_NAME, + ATTR_ATTRIBUTE, ATTR_DEVICE_TYPE, ATTR_IEEE, ATTR_IN_CLUSTERS, @@ -50,6 +50,15 @@ ATTRIBUTES = "attributes" CLUSTER_DETAILS = "cluster_details" UNSUPPORTED_ATTRIBUTES = "unsupported_attributes" +BELLOWS_VERSION = version("bellows") +ZIGPY_VERSION = version("zigpy") +ZIGPY_DECONZ_VERSION = version("zigpy-deconz") +ZIGPY_XBEE_VERSION = version("zigpy-xbee") +ZIGPY_ZNP_VERSION = version("zigpy-znp") +ZIGPY_ZIGATE_VERSION = version("zigpy-zigate") +ZHA_QUIRKS_VERSION = version("zha-quirks") +ZHA_VERSION = version("zha") + def shallow_asdict(obj: Any) -> dict: """Return a shallow copy of a dataclass as a dict.""" @@ -86,14 +95,14 @@ async def async_get_config_entry_diagnostics( channel: 100 * energy / 255 for channel, energy in energy_scan.items() }, "versions": { - "bellows": version("bellows"), - "zigpy": version("zigpy"), - "zigpy_deconz": version("zigpy-deconz"), - "zigpy_xbee": version("zigpy-xbee"), - "zigpy_znp": version("zigpy_znp"), - "zigpy_zigate": version("zigpy-zigate"), - "zhaquirks": version("zha-quirks"), - "zha": 
version("zha"), + "bellows": BELLOWS_VERSION, + "zigpy": ZIGPY_VERSION, + "zigpy_deconz": ZIGPY_DECONZ_VERSION, + "zigpy_xbee": ZIGPY_XBEE_VERSION, + "zigpy_znp": ZIGPY_ZNP_VERSION, + "zigpy_zigate": ZIGPY_ZIGATE_VERSION, + "zhaquirks": ZHA_QUIRKS_VERSION, + "zha": ZHA_VERSION, }, "devices": [ { @@ -158,27 +167,15 @@ def get_endpoint_cluster_attr_data(zha_device: Device) -> dict: def get_cluster_attr_data(cluster: Cluster) -> dict: """Return cluster attribute data.""" - unsupported_attributes = {} - for u_attr in cluster.unsupported_attributes: - try: - u_attr_def = cluster.find_attribute(u_attr) - unsupported_attributes[f"0x{u_attr_def.id:04x}"] = { - ATTR_ATTRIBUTE_NAME: u_attr_def.name - } - except KeyError: - if isinstance(u_attr, int): - unsupported_attributes[f"0x{u_attr:04x}"] = {} - else: - unsupported_attributes[u_attr] = {} - return { ATTRIBUTES: { f"0x{attr_id:04x}": { - ATTR_ATTRIBUTE_NAME: attr_def.name, - ATTR_VALUE: attr_value, + ATTR_ATTRIBUTE: repr(attr_def), + ATTR_VALUE: cluster.get(attr_def.name), } for attr_id, attr_def in cluster.attributes.items() - if (attr_value := cluster.get(attr_def.name)) is not None }, - UNSUPPORTED_ATTRIBUTES: unsupported_attributes, + UNSUPPORTED_ATTRIBUTES: sorted( + cluster.unsupported_attributes, key=lambda v: (isinstance(v, str), v) + ), } diff --git a/homeassistant/components/zha/helpers.py b/homeassistant/components/zha/helpers.py index 35a794e8631..f70c8a9cb3e 100644 --- a/homeassistant/components/zha/helpers.py +++ b/homeassistant/components/zha/helpers.py @@ -802,21 +802,24 @@ class ZHAGatewayProxy(EventBase): ) def _cleanup_group_entity_registry_entries( - self, zigpy_group: zigpy.group.Group + self, zha_group_proxy: ZHAGroupProxy ) -> None: """Remove entity registry entries for group entities when the groups are removed from HA.""" # first we collect the potential unique ids for entities that could be created from this group possible_entity_unique_ids = [ - f"{domain}_zha_group_0x{zigpy_group.group_id:04x}" + f"{domain}_zha_group_0x{zha_group_proxy.group.group_id:04x}" for domain in GROUP_ENTITY_DOMAINS ] # then we get all group entity entries tied to the coordinator entity_registry = er.async_get(self.hass) - assert self.coordinator_zha_device + assert self.gateway.coordinator_zha_device + coordinator_proxy = self.device_proxies[ + self.gateway.coordinator_zha_device.ieee + ] all_group_entity_entries = er.async_entries_for_device( entity_registry, - self.coordinator_zha_device.device_id, + coordinator_proxy.device_id, include_disabled_entities=True, ) @@ -1012,16 +1015,12 @@ def async_get_zha_device_proxy(hass: HomeAssistant, device_id: str) -> ZHADevice _LOGGER.error("Device id `%s` not found in registry", device_id) raise KeyError(f"Device id `{device_id}` not found in registry.") zha_gateway_proxy = get_zha_gateway_proxy(hass) - try: - ieee_address = list(registry_device.identifiers)[0][1] - ieee = EUI64.convert(ieee_address) - except (IndexError, ValueError) as ex: - _LOGGER.error( - "Unable to determine device IEEE for device with device id `%s`", device_id - ) - raise KeyError( - f"Unable to determine device IEEE for device with device id `{device_id}`." 
- ) from ex + ieee_address = next( + identifier + for domain, identifier in registry_device.identifiers + if domain == DOMAIN + ) + ieee = EUI64.convert(ieee_address) return zha_gateway_proxy.device_proxies[ieee] diff --git a/homeassistant/components/zha/icons.json b/homeassistant/components/zha/icons.json index 9b060e8105a..9d5254fe237 100644 --- a/homeassistant/components/zha/icons.json +++ b/homeassistant/components/zha/icons.json @@ -86,6 +86,18 @@ }, "presence_detection_timeout": { "default": "mdi:timer-edit" + }, + "exercise_trigger_time": { + "default": "mdi:clock" + }, + "external_temperature_sensor": { + "default": "mdi:thermometer" + }, + "load_room_mean": { + "default": "mdi:scale-balance" + }, + "regulation_setpoint_offset": { + "default": "mdi:thermostat" } }, "select": { @@ -94,6 +106,9 @@ }, "keypad_lockout": { "default": "mdi:lock" + }, + "exercise_day_of_week": { + "default": "mdi:wrench-clock" } }, "sensor": { @@ -132,6 +147,15 @@ }, "hooks_state": { "default": "mdi:hook" + }, + "open_window_detected": { + "default": "mdi:window-open" + }, + "load_estimate": { + "default": "mdi:scale-balance" + }, + "preheat_time": { + "default": "mdi:radiator" } }, "switch": { @@ -158,21 +182,60 @@ }, "hooks_locked": { "default": "mdi:lock" + }, + "external_window_sensor": { + "default": "mdi:window-open" + }, + "use_internal_window_detection": { + "default": "mdi:window-open" + }, + "prioritize_external_temperature_sensor": { + "default": "mdi:thermometer" + }, + "heat_available": { + "default": "mdi:water-boiler" + }, + "use_load_balancing": { + "default": "mdi:scale-balance" } } }, "services": { - "permit": "mdi:cellphone-link", - "remove": "mdi:cellphone-remove", - "reconfigure_device": "mdi:cellphone-cog", - "set_zigbee_cluster_attribute": "mdi:cog", - "issue_zigbee_cluster_command": "mdi:console", - "issue_zigbee_group_command": "mdi:console", - "warning_device_squawk": "mdi:alert", - "warning_device_warn": "mdi:alert", - "clear_lock_user_code": "mdi:lock-remove", - "enable_lock_user_code": "mdi:lock", - "disable_lock_user_code": "mdi:lock-off", - "set_lock_user_code": "mdi:lock" + "permit": { + "service": "mdi:cellphone-link" + }, + "remove": { + "service": "mdi:cellphone-remove" + }, + "reconfigure_device": { + "service": "mdi:cellphone-cog" + }, + "set_zigbee_cluster_attribute": { + "service": "mdi:cog" + }, + "issue_zigbee_cluster_command": { + "service": "mdi:console" + }, + "issue_zigbee_group_command": { + "service": "mdi:console" + }, + "warning_device_squawk": { + "service": "mdi:alert" + }, + "warning_device_warn": { + "service": "mdi:alert" + }, + "clear_lock_user_code": { + "service": "mdi:lock-remove" + }, + "enable_lock_user_code": { + "service": "mdi:lock" + }, + "disable_lock_user_code": { + "service": "mdi:lock-off" + }, + "set_lock_user_code": { + "service": "mdi:lock" + } } } diff --git a/homeassistant/components/zha/manifest.json b/homeassistant/components/zha/manifest.json index a5e57fcb1ec..df60829a1e2 100644 --- a/homeassistant/components/zha/manifest.json +++ b/homeassistant/components/zha/manifest.json @@ -21,7 +21,7 @@ "zha", "universal_silabs_flasher" ], - "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.31"], + "requirements": ["universal-silabs-flasher==0.0.22", "zha==0.0.32"], "usb": [ { "vid": "10C4", diff --git a/homeassistant/components/zha/update.py b/homeassistant/components/zha/update.py index e12d048b190..3a857f9d89b 100644 --- a/homeassistant/components/zha/update.py +++ b/homeassistant/components/zha/update.py @@ -95,6 +95,7 @@ 
class ZHAFirmwareUpdateEntity( UpdateEntityFeature.INSTALL | UpdateEntityFeature.PROGRESS | UpdateEntityFeature.SPECIFIC_VERSION + | UpdateEntityFeature.RELEASE_NOTES ) def __init__(self, entity_data: EntityData, **kwargs: Any) -> None: @@ -143,6 +144,14 @@ class ZHAFirmwareUpdateEntity( """ return self.entity_data.entity.release_summary + async def async_release_notes(self) -> str | None: + """Return full release notes. + + This is suitable for a long changelog that does not fit in the release_summary + property. The returned string can contain markdown. + """ + return self.entity_data.entity.release_notes + @property def release_url(self) -> str | None: """URL to the full release notes of the latest version available.""" @@ -155,7 +164,7 @@ class ZHAFirmwareUpdateEntity( ) -> None: """Install an update.""" try: - await self.entity_data.entity.async_install(version=version, backup=backup) + await self.entity_data.entity.async_install(version=version) except ZHAException as exc: raise HomeAssistantError(exc) from exc finally: diff --git a/homeassistant/components/zone/icons.json b/homeassistant/components/zone/icons.json index a03163179cb..a9829425570 100644 --- a/homeassistant/components/zone/icons.json +++ b/homeassistant/components/zone/icons.json @@ -1,5 +1,7 @@ { "services": { - "reload": "mdi:reload" + "reload": { + "service": "mdi:reload" + } } } diff --git a/homeassistant/components/zoneminder/icons.json b/homeassistant/components/zoneminder/icons.json index 8ca180d7399..3f9f6410a22 100644 --- a/homeassistant/components/zoneminder/icons.json +++ b/homeassistant/components/zoneminder/icons.json @@ -1,5 +1,7 @@ { "services": { - "set_run_state": "mdi:cog" + "set_run_state": { + "service": "mdi:cog" + } } } diff --git a/homeassistant/components/zwave_js/config_flow.py b/homeassistant/components/zwave_js/config_flow.py index e73fa9fc3a7..3e979b224ae 100644 --- a/homeassistant/components/zwave_js/config_flow.py +++ b/homeassistant/components/zwave_js/config_flow.py @@ -366,18 +366,6 @@ class ZWaveJSConfigFlow(BaseZwaveJSFlow, ConfigFlow, domain=DOMAIN): """Return the options flow.""" return OptionsFlowHandler(config_entry) - async def async_step_import(self, data: dict[str, Any]) -> ConfigFlowResult: - """Handle imported data. - - This step will be used when importing data - during Z-Wave to Z-Wave JS migration. - """ - # Note that the data comes from the zwave integration. - # So we don't use our constants here. 
- self.s0_legacy_key = data.get("network_key") - self.usb_path = data.get("usb_path") - return await self.async_step_user() - async def async_step_user( self, user_input: dict[str, Any] | None = None ) -> ConfigFlowResult: diff --git a/homeassistant/components/zwave_js/helpers.py b/homeassistant/components/zwave_js/helpers.py index 737b8deff34..5885527e01c 100644 --- a/homeassistant/components/zwave_js/helpers.py +++ b/homeassistant/components/zwave_js/helpers.py @@ -343,20 +343,18 @@ def async_get_nodes_from_area_id( } ) # Add devices in an area that are Z-Wave JS devices - for device in dr.async_entries_for_area(dev_reg, area_id): - if next( - ( - config_entry_id - for config_entry_id in device.config_entries - if cast( - ConfigEntry, - hass.config_entries.async_get_entry(config_entry_id), - ).domain - == DOMAIN - ), - None, - ): - nodes.add(async_get_node_from_device_id(hass, device.id, dev_reg)) + nodes.update( + async_get_node_from_device_id(hass, device.id, dev_reg) + for device in dr.async_entries_for_area(dev_reg, area_id) + if any( + cast( + ConfigEntry, + hass.config_entries.async_get_entry(config_entry_id), + ).domain + == DOMAIN + for config_entry_id in device.config_entries + ) + ) return nodes diff --git a/homeassistant/components/zwave_js/icons.json b/homeassistant/components/zwave_js/icons.json index 2956cf2c6e0..b52255e09d1 100644 --- a/homeassistant/components/zwave_js/icons.json +++ b/homeassistant/components/zwave_js/icons.json @@ -57,17 +57,41 @@ } }, "services": { - "bulk_set_partial_config_parameters": "mdi:cogs", - "clear_lock_usercode": "mdi:eraser", - "invoke_cc_api": "mdi:api", - "multicast_set_value": "mdi:list-box", - "ping": "mdi:crosshairs-gps", - "refresh_notifications": "mdi:bell", - "refresh_value": "mdi:refresh", - "reset_meter": "mdi:meter-electric", - "set_config_parameter": "mdi:cog", - "set_lock_configuration": "mdi:shield-lock", - "set_lock_usercode": "mdi:lock-smart", - "set_value": "mdi:form-textbox" + "bulk_set_partial_config_parameters": { + "service": "mdi:cogs" + }, + "clear_lock_usercode": { + "service": "mdi:eraser" + }, + "invoke_cc_api": { + "service": "mdi:api" + }, + "multicast_set_value": { + "service": "mdi:list-box" + }, + "ping": { + "service": "mdi:crosshairs-gps" + }, + "refresh_notifications": { + "service": "mdi:bell" + }, + "refresh_value": { + "service": "mdi:refresh" + }, + "reset_meter": { + "service": "mdi:meter-electric" + }, + "set_config_parameter": { + "service": "mdi:cog" + }, + "set_lock_configuration": { + "service": "mdi:shield-lock" + }, + "set_lock_usercode": { + "service": "mdi:lock-smart" + }, + "set_value": { + "service": "mdi:form-textbox" + } } } diff --git a/homeassistant/components/zwave_js/services.py b/homeassistant/components/zwave_js/services.py index e5c0bd64781..969a235bb41 100644 --- a/homeassistant/components/zwave_js/services.py +++ b/homeassistant/components/zwave_js/services.py @@ -48,6 +48,12 @@ _LOGGER = logging.getLogger(__name__) type _NodeOrEndpointType = ZwaveNode | Endpoint +TARGET_VALIDATORS = { + vol.Optional(ATTR_AREA_ID): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(ATTR_DEVICE_ID): vol.All(cv.ensure_list, [cv.string]), + vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, +} + def parameter_name_does_not_need_bitmask( val: dict[str, int | str | list[str]], @@ -261,13 +267,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - 
vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Optional(const.ATTR_ENDPOINT, default=0): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Any( vol.Coerce(int), cv.string @@ -305,13 +305,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Optional(const.ATTR_ENDPOINT, default=0): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_PARAMETER): vol.Coerce(int), vol.Required(const.ATTR_CONFIG_VALUE): vol.Any( @@ -356,13 +350,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Required(const.ATTR_COMMAND_CLASS): vol.Coerce(int), vol.Required(const.ATTR_PROPERTY): vol.Any( vol.Coerce(int), str @@ -391,13 +379,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Optional(const.ATTR_BROADCAST, default=False): cv.boolean, vol.Required(const.ATTR_COMMAND_CLASS): vol.Coerce(int), vol.Required(const.ATTR_PROPERTY): vol.Any( @@ -428,15 +410,7 @@ class ZWaveServices: self.async_ping, schema=vol.Schema( vol.All( - { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, - }, + TARGET_VALIDATORS, cv.has_at_least_one_key( ATTR_DEVICE_ID, ATTR_ENTITY_ID, ATTR_AREA_ID ), @@ -453,13 +427,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Required(const.ATTR_COMMAND_CLASS): vol.All( vol.Coerce(int), vol.Coerce(CommandClass) ), @@ -483,13 +451,7 @@ class ZWaveServices: schema=vol.Schema( vol.All( { - vol.Optional(ATTR_AREA_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_DEVICE_ID): vol.All( - cv.ensure_list, [cv.string] - ), - vol.Optional(ATTR_ENTITY_ID): cv.entity_ids, + **TARGET_VALIDATORS, vol.Required(const.ATTR_NOTIFICATION_TYPE): vol.All( vol.Coerce(int), vol.Coerce(NotificationType) ), diff --git a/homeassistant/components/zwave_js/services.yaml b/homeassistant/components/zwave_js/services.yaml index 81809e3fbeb..f5063fdfd93 100644 --- a/homeassistant/components/zwave_js/services.yaml +++ b/homeassistant/components/zwave_js/services.yaml @@ -89,10 +89,28 @@ set_lock_configuration: boolean: set_config_parameter: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true endpoint: example: 1 default: 0 @@ -127,10 +145,28 @@ set_config_parameter: max: 3 
bulk_set_partial_config_parameters: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true endpoint: example: 1 default: 0 @@ -169,10 +205,28 @@ refresh_value: boolean: set_value: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true command_class: example: 117 required: true @@ -208,10 +262,28 @@ set_value: boolean: multicast_set_value: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true broadcast: example: true required: false @@ -248,16 +320,55 @@ multicast_set_value: object: ping: - target: - entity: - integration: zwave_js + fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true reset_meter: - target: - entity: - domain: sensor - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + entity: + - integration: zwave_js + domain: sensor + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + entity: + - integration: zwave_js + domain: sensor + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + domain: sensor + multiple: true meter_type: example: 1 required: false @@ -270,10 +381,28 @@ reset_meter: text: invoke_cc_api: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: zwave_js + multiple: true command_class: example: 132 required: true @@ -296,10 +425,28 @@ invoke_cc_api: object: refresh_notifications: - target: - entity: - integration: zwave_js fields: + area_id: + example: living_room + selector: + area: + device: + - integration: zwave_js + multiple: true + device_id: + example: "8f4219cfa57e23f6f669c4616c2205e2" + selector: + device: + filter: + - integration: zwave_js + multiple: true + entity_id: + example: sensor.living_room_temperature + selector: + entity: + filter: + - integration: 
zwave_js + multiple: true notification_type: example: 1 required: true diff --git a/homeassistant/components/zwave_js/strings.json b/homeassistant/components/zwave_js/strings.json index 4bba3e0538c..ca7d5153e6e 100644 --- a/homeassistant/components/zwave_js/strings.json +++ b/homeassistant/components/zwave_js/strings.json @@ -265,10 +265,22 @@ "bulk_set_partial_config_parameters": { "description": "Allows for bulk setting partial parameters. Useful when multiple partial parameters have to be set at the same time.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, "endpoint": { "description": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::description%]", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "parameter": { "description": "[%key:component::zwave_js::services::set_config_parameter::fields::parameter::description%]", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::parameter::name%]" @@ -293,14 +305,26 @@ "invoke_cc_api": { "description": "Calls a Command Class API on a node. Some Command Classes can't be fully controlled via the `set_value` action and require direct calls to the Command Class API.", "fields": { + "area_id": { + "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", + "name": "Area ID(s)" + }, "command_class": { "description": "The ID of the command class that you want to issue a command to.", "name": "[%key:component::zwave_js::services::set_value::fields::command_class::name%]" }, + "device_id": { + "description": "The device(s) to target for this service.", + "name": "Device ID(s)" + }, "endpoint": { "description": "The endpoint to call the API on. If an endpoint is specified, that endpoint will be targeted for all nodes associated with the target areas, devices, and/or entities. If an endpoint is not specified, the root endpoint (0) will be targeted for nodes associated with target areas and devices, and the endpoint for the primary value of each entity will be targeted.", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, + "entity_id": { + "description": "The entity ID(s) to target for this service.", + "name": "Entity ID(s)" + }, "method_name": { "description": "The name of the API method to call. Refer to the Z-Wave JS Command Class API documentation (https://zwave-js.github.io/node-zwave-js/#/api/CCs/index) for available methods.", "name": "Method name" @@ -315,6 +339,10 @@ "multicast_set_value": { "description": "Changes any value that Z-Wave JS recognizes on multiple Z-Wave devices using multicast, so all devices receive the message simultaneously. 
This action has minimal validation so only use this action if you know what you are doing.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, "broadcast": { "description": "Whether command should be broadcast to all devices on the network.", "name": "Broadcast?" @@ -323,10 +351,18 @@ "description": "[%key:component::zwave_js::services::set_value::fields::command_class::description%]", "name": "[%key:component::zwave_js::services::set_value::fields::command_class::name%]" }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, "endpoint": { "description": "[%key:component::zwave_js::services::set_value::fields::endpoint::description%]", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "options": { "description": "[%key:component::zwave_js::services::set_value::fields::options::description%]", "name": "[%key:component::zwave_js::services::set_value::fields::options::name%]" @@ -348,11 +384,37 @@ }, "ping": { "description": "Forces Z-Wave JS to try to reach a node. This can be used to update the status of the node in Z-Wave JS when you think it doesn't accurately reflect reality, e.g. reviving a failed/dead node or marking the node as asleep.", + "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + } + }, "name": "Ping a node" }, "refresh_notifications": { "description": "Refreshes notifications on a node based on notification type and optionally notification event.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "notification_event": { "description": "The Notification Event number as defined in the Z-Wave specs.", "name": "Notification Event" @@ -381,6 +443,18 @@ "reset_meter": { "description": "Resets the meters on a node.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": 
"[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "meter_type": { "description": "The type of meter to reset. Not all meters support the ability to pick a meter type to reset.", "name": "Meter type" @@ -395,14 +469,26 @@ "set_config_parameter": { "description": "Changes the configuration parameters of your Z-Wave devices.", "fields": { + "area_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::area_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::area_id::name%]" + }, "bitmask": { "description": "Target a specific bitmask (see the documentation for more information). Cannot be combined with value_size or value_format.", "name": "Bitmask" }, + "device_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::device_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::device_id::name%]" + }, "endpoint": { "description": "The configuration parameter's endpoint.", "name": "Endpoint" }, + "entity_id": { + "description": "[%key:component::zwave_js::services::set_value::fields::entity_id::description%]", + "name": "[%key:component::zwave_js::services::set_value::fields::entity_id::name%]" + }, "parameter": { "description": "The name (or ID) of the configuration parameter you want to configure.", "name": "Parameter" @@ -477,14 +563,26 @@ "set_value": { "description": "Changes any value that Z-Wave JS recognizes on a Z-Wave device. This action has minimal validation so only use this action if you know what you are doing.", "fields": { + "area_id": { + "description": "The area(s) to target for this service. If an area is specified, all zwave_js devices and entities in that area will be targeted for this service.", + "name": "Area ID(s)" + }, "command_class": { "description": "The ID of the command class for the value.", "name": "Command class" }, + "device_id": { + "description": "The device(s) to target for this service.", + "name": "Device ID(s)" + }, "endpoint": { "description": "The endpoint for the value.", "name": "[%key:component::zwave_js::services::set_config_parameter::fields::endpoint::name%]" }, + "entity_id": { + "description": "The entity ID(s) to target for this service.", + "name": "Entity ID(s)" + }, "options": { "description": "Set value options map. Refer to the Z-Wave JS documentation for more information on what options can be set.", "name": "Options" diff --git a/homeassistant/config.py b/homeassistant/config.py index 948ab342e79..9063429ca91 100644 --- a/homeassistant/config.py +++ b/homeassistant/config.py @@ -817,7 +817,9 @@ async def async_process_ha_core_config(hass: HomeAssistant, config: dict) -> Non This method is a coroutine. """ - config = CORE_CONFIG_SCHEMA(config) + # CORE_CONFIG_SCHEMA is not async safe since it uses vol.IsDir + # so we need to run it in an executor job. + config = await hass.async_add_executor_job(CORE_CONFIG_SCHEMA, config) # Only load auth during startup. 
if not hasattr(hass, "auth"): @@ -1535,7 +1537,9 @@ async def async_process_component_config( # No custom config validator, proceed with schema validation if hasattr(component, "CONFIG_SCHEMA"): try: - return IntegrationConfigInfo(component.CONFIG_SCHEMA(config), []) + return IntegrationConfigInfo( + await cv.async_validate(hass, component.CONFIG_SCHEMA, config), [] + ) except vol.Invalid as exc: exc_info = ConfigExceptionInfo( exc, @@ -1570,7 +1574,9 @@ async def async_process_component_config( # Validate component specific platform schema platform_path = f"{p_name}.{domain}" try: - p_validated = component_platform_schema(p_config) + p_validated = await cv.async_validate( + hass, component_platform_schema, p_config + ) except vol.Invalid as exc: exc_info = ConfigExceptionInfo( exc, diff --git a/homeassistant/config_entries.py b/homeassistant/config_entries.py index aa0113cd7ce..f3b0aa03383 100644 --- a/homeassistant/config_entries.py +++ b/homeassistant/config_entries.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections import UserDict +from collections import UserDict, defaultdict from collections.abc import ( Callable, Coroutine, @@ -1224,8 +1224,12 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): super().__init__(hass) self.config_entries = config_entries self._hass_config = hass_config - self._pending_import_flows: dict[str, dict[str, asyncio.Future[None]]] = {} - self._initialize_futures: dict[str, list[asyncio.Future[None]]] = {} + self._pending_import_flows: defaultdict[ + str, dict[str, asyncio.Future[None]] + ] = defaultdict(dict) + self._initialize_futures: defaultdict[str, set[asyncio.Future[None]]] = ( + defaultdict(set) + ) self._discovery_debouncer = Debouncer[None]( hass, _LOGGER, @@ -1245,10 +1249,10 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): @callback def _async_has_other_discovery_flows(self, flow_id: str) -> bool: """Check if there are any other discovery flows in progress.""" - return any( - flow.context["source"] in DISCOVERY_SOURCES and flow.flow_id != flow_id - for flow in self._progress.values() - ) + for flow in self._progress.values(): + if flow.flow_id != flow_id and flow.context["source"] in DISCOVERY_SOURCES: + return True + return False async def async_init( self, handler: str, *, context: dict[str, Any] | None = None, data: Any = None @@ -1278,12 +1282,11 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): loop = self.hass.loop if context["source"] == SOURCE_IMPORT: - self._pending_import_flows.setdefault(handler, {})[flow_id] = ( - loop.create_future() - ) + self._pending_import_flows[handler][flow_id] = loop.create_future() cancel_init_future = loop.create_future() - self._initialize_futures.setdefault(handler, []).append(cancel_init_future) + handler_init_futures = self._initialize_futures[handler] + handler_init_futures.add(cancel_init_future) try: async with interrupt( cancel_init_future, @@ -1294,8 +1297,13 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): except FlowCancelledError as ex: raise asyncio.CancelledError from ex finally: - self._initialize_futures[handler].remove(cancel_init_future) - self._pending_import_flows.get(handler, {}).pop(flow_id, None) + handler_init_futures.remove(cancel_init_future) + if not handler_init_futures: + del self._initialize_futures[handler] + if handler in self._pending_import_flows: + self._pending_import_flows[handler].pop(flow_id, None) + if not 
self._pending_import_flows[handler]: + del self._pending_import_flows[handler] if result["type"] != data_entry_flow.FlowResultType.ABORT: await self.async_post_init(flow, result) @@ -1322,11 +1330,18 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): try: result = await self._async_handle_step(flow, flow.init_step, data) finally: - init_done = self._pending_import_flows.get(handler, {}).get(flow_id) - if init_done and not init_done.done(): - init_done.set_result(None) + self._set_pending_import_done(flow) return flow, result + def _set_pending_import_done(self, flow: ConfigFlow) -> None: + """Set pending import flow as done.""" + if ( + (handler_import_flows := self._pending_import_flows.get(flow.handler)) + and (init_done := handler_import_flows.get(flow.flow_id)) + and not init_done.done() + ): + init_done.set_result(None) + @callback def async_shutdown(self) -> None: """Cancel any initializing flows.""" @@ -1347,9 +1362,7 @@ class ConfigEntriesFlowManager(data_entry_flow.FlowManager[ConfigFlowResult]): # We do this to avoid a circular dependency where async_finish_flow sets up a # new entry, which needs the integration to be set up, which is waiting for # init to be done. - init_done = self._pending_import_flows.get(flow.handler, {}).get(flow.flow_id) - if init_done and not init_done.done(): - init_done.set_result(None) + self._set_pending_import_done(flow) # Remove notification if no other discovery config entries in progress if not self._async_has_other_discovery_flows(flow.flow_id): @@ -1699,12 +1712,12 @@ class ConfigEntries: entries = self._entries.get_entries_for_domain(domain) if include_ignore and include_disabled: return bool(entries) - return any( - entry - for entry in entries - if (include_ignore or entry.source != SOURCE_IGNORE) - and (include_disabled or not entry.disabled_by) - ) + for entry in entries: + if (include_ignore or entry.source != SOURCE_IGNORE) and ( + include_disabled or not entry.disabled_by + ): + return True + return False @callback def async_entries( @@ -1729,6 +1742,16 @@ class ConfigEntries: and (include_disabled or not entry.disabled_by) ] + @callback + def async_loaded_entries(self, domain: str) -> list[ConfigEntry]: + """Return loaded entries for a specific domain. + + This will exclude ignored or disabled config entries.
+ """ + entries = self._entries.get_entries_for_domain(domain) + + return [entry for entry in entries if entry.state == ConfigEntryState.LOADED] + @callback def async_entry_for_domain_unique_id( self, domain: str, unique_id: str diff --git a/homeassistant/const.py b/homeassistant/const.py index 2a06c24843a..5c61650ec32 100644 --- a/homeassistant/const.py +++ b/homeassistant/const.py @@ -23,8 +23,8 @@ if TYPE_CHECKING: APPLICATION_NAME: Final = "HomeAssistant" MAJOR_VERSION: Final = 2024 -MINOR_VERSION: Final = 8 -PATCH_VERSION: Final = "3" +MINOR_VERSION: Final = 9 +PATCH_VERSION: Final = "0" __short_version__: Final = f"{MAJOR_VERSION}.{MINOR_VERSION}" __version__: Final = f"{__short_version__}.{PATCH_VERSION}" REQUIRED_PYTHON_VER: Final[tuple[int, int, int]] = (3, 12, 0) @@ -60,7 +60,6 @@ class Platform(StrEnum): LAWN_MOWER = "lawn_mower" LIGHT = "light" LOCK = "lock" - MAILBOX = "mailbox" MEDIA_PLAYER = "media_player" NOTIFY = "notify" NUMBER = "number" @@ -222,6 +221,7 @@ CONF_METHOD: Final = "method" CONF_MINIMUM: Final = "minimum" CONF_MODE: Final = "mode" CONF_MODEL: Final = "model" +CONF_MODEL_ID: Final = "model_id" CONF_MONITORED_CONDITIONS: Final = "monitored_conditions" CONF_MONITORED_VARIABLES: Final = "monitored_variables" CONF_NAME: Final = "name" @@ -565,6 +565,7 @@ ATTR_CONNECTIONS: Final = "connections" ATTR_DEFAULT_NAME: Final = "default_name" ATTR_MANUFACTURER: Final = "manufacturer" ATTR_MODEL: Final = "model" +ATTR_MODEL_ID: Final = "model_id" ATTR_SERIAL_NUMBER: Final = "serial_number" ATTR_SUGGESTED_AREA: Final = "suggested_area" ATTR_SW_VERSION: Final = "sw_version" @@ -671,8 +672,19 @@ _DEPRECATED_POWER_BTU_PER_HOUR: Final = DeprecatedConstantEnum( ) """Deprecated: please use UnitOfPower.BTU_PER_HOUR.""" + # Reactive power units -POWER_VOLT_AMPERE_REACTIVE: Final = "var" +class UnitOfReactivePower(StrEnum): + """Reactive power units.""" + + VOLT_AMPERE_REACTIVE = "var" + + +_DEPRECATED_POWER_VOLT_AMPERE_REACTIVE: Final = DeprecatedConstantEnum( + UnitOfReactivePower.VOLT_AMPERE_REACTIVE, + "2025.9", +) +"""Deprecated: please use UnitOfReactivePower.VOLT_AMPERE_REACTIVE.""" # Energy units diff --git a/homeassistant/core.py b/homeassistant/core.py index 5d223b9f19f..b797798134e 100644 --- a/homeassistant/core.py +++ b/homeassistant/core.py @@ -101,6 +101,7 @@ from .util import dt as dt_util, location from .util.async_ import ( cancelling, create_eager_task, + get_scheduled_timer_handles, run_callback_threadsafe, shutdown_run_callback_threadsafe, ) @@ -450,9 +451,7 @@ class HomeAssistant: self.import_executor = InterruptibleThreadPoolExecutor( max_workers=1, thread_name_prefix="ImportExecutor" ) - self.loop_thread_id = getattr( - self.loop, "_thread_ident", getattr(self.loop, "_thread_id") - ) + self.loop_thread_id = getattr(self.loop, "_thread_id") def verify_event_loop_thread(self, what: str) -> None: """Report and raise if we are not running in the event loop thread.""" @@ -1227,8 +1226,7 @@ class HomeAssistant: def _cancel_cancellable_timers(self) -> None: """Cancel timer handles marked as cancellable.""" - handles: Iterable[asyncio.TimerHandle] = self.loop._scheduled # type: ignore[attr-defined] # noqa: SLF001 - for handle in handles: + for handle in get_scheduled_timer_handles(self.loop): if ( not handle.cancelled() and (args := handle._args) # noqa: SLF001 diff --git a/homeassistant/generated/amazon_polly.py b/homeassistant/generated/amazon_polly.py new file mode 100644 index 00000000000..1d870bf6c92 --- /dev/null +++ b/homeassistant/generated/amazon_polly.py 
@@ -0,0 +1,137 @@ +"""Automatically generated file. + +To update, run python3 -m script.amazon_polly +""" + +from __future__ import annotations + +from typing import Final + +SUPPORTED_ENGINES: Final[set[str]] = { + "generative", + "long-form", + "neural", + "standard", +} + +SUPPORTED_REGIONS: Final[set[str]] = { + "af-south-1", + "ap-east-1", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-south-1", + "ap-southeast-1", + "ap-southeast-2", + "ca-central-1", + "eu-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "me-south-1", + "sa-east-1", + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", +} + +SUPPORTED_VOICES: Final[set[str]] = { + "Aditi", + "Adriano", + "Amy", + "Andres", + "Aria", + "Arlet", + "Arthur", + "Astrid", + "Ayanda", + "Bianca", + "Brian", + "Burcu", + "Camila", + "Carla", + "Carmen", + "Celine", + "Chantal", + "Conchita", + "Cristiano", + "Daniel", + "Danielle", + "Dora", + "Elin", + "Emma", + "Enrique", + "Ewa", + "Filiz", + "Gabrielle", + "Geraint", + "Giorgio", + "Gregory", + "Gwyneth", + "Hala", + "Hannah", + "Hans", + "Hiujin", + "Ida", + "Ines", + "Isabelle", + "Ivy", + "Jacek", + "Jan", + "Joanna", + "Joey", + "Justin", + "Kajal", + "Karl", + "Kazuha", + "Kendra", + "Kevin", + "Kimberly", + "Laura", + "Lea", + "Liam", + "Lisa", + "Liv", + "Lotte", + "Lucia", + "Lupe", + "Mads", + "Maja", + "Marlene", + "Mathieu", + "Matthew", + "Maxim", + "Mia", + "Miguel", + "Mizuki", + "Naja", + "Niamh", + "Nicole", + "Ola", + "Olivia", + "Pedro", + "Penelope", + "Raveena", + "Remi", + "Ricardo", + "Ruben", + "Russell", + "Ruth", + "Salli", + "Seoyeon", + "Sergio", + "Sofie", + "Stephen", + "Suvi", + "Takumi", + "Tatyana", + "Thiago", + "Tomoko", + "Vicki", + "Vitoria", + "Zayd", + "Zeina", + "Zhiyu", +} diff --git a/homeassistant/generated/application_credentials.py b/homeassistant/generated/application_credentials.py index dc30f9d76f0..75fd489bad3 100644 --- a/homeassistant/generated/application_credentials.py +++ b/homeassistant/generated/application_credentials.py @@ -29,6 +29,7 @@ APPLICATION_CREDENTIALS = [ "twitch", "withings", "xbox", + "yale", "yolink", "youtube", ] diff --git a/homeassistant/generated/config_flows.py b/homeassistant/generated/config_flows.py index 0c37cf9c412..afa906fd371 100644 --- a/homeassistant/generated/config_flows.py +++ b/homeassistant/generated/config_flows.py @@ -53,6 +53,7 @@ FLOWS = { "androidtv_remote", "anova", "anthemav", + "anthropic", "aosmith", "apcupsd", "apple_tv", @@ -199,6 +200,7 @@ FLOWS = { "fritzbox_callmonitor", "fronius", "frontier_silicon", + "fujitsu_fglair", "fully_kiosk", "fyta", "garages_amsterdam", @@ -308,6 +310,7 @@ FLOWS = { "lastfm", "launch_library", "laundrify", + "lcn", "ld2410_ble", "leaone", "led_ble", @@ -324,7 +327,6 @@ FLOWS = { "local_ip", "local_todo", "locative", - "logi_circle", "lookin", "loqed", "luftdaten", @@ -385,6 +387,7 @@ FLOWS = { "nextdns", "nfandroidtv", "nibe_heatpump", + "nice_go", "nightscout", "nina", "nmap_tracker", @@ -523,6 +526,7 @@ FLOWS = { "smartthings", "smarttub", "smhi", + "smlight", "sms", "snapcast", "snooz", @@ -590,6 +594,7 @@ FLOWS = { "tomorrowio", "toon", "totalconnect", + "touchline_sl", "tplink", "tplink_omada", "traccar", @@ -659,6 +664,7 @@ FLOWS = { "xiaomi_aqara", "xiaomi_ble", "xiaomi_miio", + "yale", "yale_smart_alarm", "yalexs_ble", "yamaha_musiccast", diff --git a/homeassistant/generated/dhcp.py b/homeassistant/generated/dhcp.py index f6df799d01e..8f5964f1618 100644 --- a/homeassistant/generated/dhcp.py +++ 
b/homeassistant/generated/dhcp.py @@ -12,11 +12,6 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "airzone", "macaddress": "E84F25*", }, - { - "domain": "august", - "hostname": "yale-connect-plus", - "macaddress": "00177A*", - }, { "domain": "august", "hostname": "connect", @@ -1094,6 +1089,11 @@ DHCP: Final[list[dict[str, str | bool]]] = [ "domain": "wiz", "hostname": "wiz_*", }, + { + "domain": "yale", + "hostname": "yale-connect-plus", + "macaddress": "00177A*", + }, { "domain": "yeelight", "hostname": "yeelink-*", diff --git a/homeassistant/generated/integrations.json b/homeassistant/generated/integrations.json index 13009fb58be..5f155c2926f 100644 --- a/homeassistant/generated/integrations.json +++ b/homeassistant/generated/integrations.json @@ -309,6 +309,12 @@ "config_flow": true, "iot_class": "local_push" }, + "anthropic": { + "name": "Anthropic Conversation", + "integration_type": "service", + "config_flow": true, + "iot_class": "cloud_polling" + }, "anwb_energie": { "name": "ANWB Energie", "integration_type": "virtual", @@ -408,7 +414,7 @@ "iot_class": "local_polling" }, "aquacell": { - "name": "Aquacell", + "name": "AquaCell", "integration_type": "device", "config_flow": true, "iot_class": "cloud_polling" @@ -449,6 +455,11 @@ "config_flow": false, "iot_class": "local_polling" }, + "artsound": { + "name": "ArtSound", + "integration_type": "virtual", + "supported_by": "linkplay" + }, "aruba": { "name": "Aruba", "integrations": { @@ -484,23 +495,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "asterisk": { - "name": "Asterisk", - "integrations": { - "asterisk_cdr": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling", - "name": "Asterisk Call Detail Records" - }, - "asterisk_mbox": { - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_push", - "name": "Asterisk Voicemail" - } - } - }, "asuswrt": { "name": "ASUSWRT", "integration_type": "hub", @@ -2060,10 +2054,22 @@ "config_flow": true, "iot_class": "local_polling" }, - "fujitsu_anywair": { - "name": "Fujitsu anywAIR", - "integration_type": "virtual", - "supported_by": "advantage_air" + "fujitsu": { + "name": "Fujitsu", + "integrations": { + "fujitsu_anywair": { + "integration_type": "virtual", + "config_flow": false, + "supported_by": "advantage_air", + "name": "Fujitsu anywAIR" + }, + "fujitsu_fglair": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "name": "FGLair" + } + } }, "fully_kiosk": { "name": "Fully Kiosk Browser", @@ -3172,7 +3178,7 @@ "lcn": { "name": "LCN", "integration_type": "hub", - "config_flow": false, + "config_flow": true, "iot_class": "local_push" }, "ld2410_ble": { @@ -3350,12 +3356,6 @@ "config_flow": false, "iot_class": "cloud_push" }, - "logi_circle": { - "name": "Logi Circle", - "integration_type": "hub", - "config_flow": true, - "iot_class": "cloud_polling" - }, "logitech": { "name": "Logitech", "integrations": { @@ -3460,12 +3460,6 @@ "config_flow": true, "iot_class": "cloud_push" }, - "manual": { - "name": "Manual Alarm Control Panel", - "integration_type": "hub", - "config_flow": false, - "iot_class": "calculated" - }, "marantz": { "name": "Marantz", "integration_type": "virtual", @@ -4047,6 +4041,12 @@ "config_flow": true, "iot_class": "local_polling" }, + "nice_go": { + "name": "Nice G.O.", + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push" + }, "nightscout": { "name": "Nightscout", "integration_type": "hub", @@ -5134,6 +5134,23 @@ "config_flow": true, 
"iot_class": "local_push" }, + "roth": { + "name": "Roth", + "integrations": { + "touchline": { + "integration_type": "hub", + "config_flow": false, + "iot_class": "local_polling", + "name": "Roth Touchline" + }, + "touchline_sl": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_polling", + "name": "Roth Touchline SL" + } + } + }, "rova": { "name": "ROVA", "integration_type": "hub", @@ -5588,6 +5605,12 @@ "config_flow": true, "iot_class": "cloud_polling" }, + "smlight": { + "name": "SMLIGHT SLZB", + "integration_type": "device", + "config_flow": true, + "iot_class": "local_polling" + }, "sms": { "name": "SMS notifications via GSM-modem", "integration_type": "hub", @@ -6291,12 +6314,6 @@ "config_flow": true, "iot_class": "cloud_polling" }, - "touchline": { - "name": "Roth Touchline", - "integration_type": "hub", - "config_flow": false, - "iot_class": "local_polling" - }, "tplink": { "name": "TP-Link", "integrations": { @@ -6988,8 +7005,14 @@ "yale_home": { "integration_type": "virtual", "config_flow": false, - "supported_by": "august", + "supported_by": "yale", "name": "Yale Home" + }, + "yale": { + "integration_type": "hub", + "config_flow": true, + "iot_class": "cloud_push", + "name": "Yale" } } }, @@ -7229,6 +7252,12 @@ "config_flow": true, "iot_class": "local_push" }, + "manual": { + "name": "Manual Alarm Control Panel", + "integration_type": "helper", + "config_flow": false, + "iot_class": "calculated" + }, "min_max": { "integration_type": "helper", "config_flow": true, diff --git a/homeassistant/generated/zeroconf.py b/homeassistant/generated/zeroconf.py index 7cd60da2d0e..3e5e34090d1 100644 --- a/homeassistant/generated/zeroconf.py +++ b/homeassistant/generated/zeroconf.py @@ -747,6 +747,9 @@ ZEROCONF = { }, ], "_slzb-06._tcp.local.": [ + { + "domain": "smlight", + }, { "domain": "zha", "name": "slzb-06*", diff --git a/homeassistant/helpers/aiohttp_client.py b/homeassistant/helpers/aiohttp_client.py index 6f52569c38c..d61f889d4b5 100644 --- a/homeassistant/helpers/aiohttp_client.py +++ b/homeassistant/helpers/aiohttp_client.py @@ -285,6 +285,21 @@ def _make_key( return (verify_ssl, family) +class HomeAssistantTCPConnector(aiohttp.TCPConnector): + """Home Assistant TCP Connector. + + Same as aiohttp.TCPConnector but with a longer cleanup_closed timeout. + + By default the cleanup_closed timeout is 2 seconds. This is too short + for Home Assistant since we churn through a lot of connections. We set + it to 60 seconds to reduce the overhead of aborting TLS connections + that are likely already closed. 
+ """ + + # abort transport after 60 seconds (cleanup broken connections) + _cleanup_closed_period = 60.0 + + @callback def _async_get_connector( hass: HomeAssistant, @@ -306,7 +321,7 @@ def _async_get_connector( else: ssl_context = ssl_util.get_default_no_verify_context() - connector = aiohttp.TCPConnector( + connector = HomeAssistantTCPConnector( family=family, enable_cleanup_closed=ENABLE_CLEANUP_CLOSED, ssl=ssl_context, diff --git a/homeassistant/helpers/area_registry.py b/homeassistant/helpers/area_registry.py index 3e101f185ed..5009ec654cf 100644 --- a/homeassistant/helpers/area_registry.py +++ b/homeassistant/helpers/area_registry.py @@ -153,22 +153,23 @@ class AreaRegistryItems(NormalizedNameBaseRegistryItems[AreaEntry]): def _index_entry(self, key: str, entry: AreaEntry) -> None: """Index an entry.""" + super()._index_entry(key, entry) if entry.floor_id is not None: self._floors_index[entry.floor_id][key] = True for label in entry.labels: self._labels_index[label][key] = True - super()._index_entry(key, entry) def _unindex_entry( self, key: str, replacement_entry: AreaEntry | None = None ) -> None: + # always call base class before other indices + super()._unindex_entry(key, replacement_entry) entry = self.data[key] if labels := entry.labels: for label in labels: self._unindex_entry_value(key, label, self._labels_index) if floor_id := entry.floor_id: self._unindex_entry_value(key, floor_id, self._floors_index) - return super()._unindex_entry(key, replacement_entry) def get_areas_for_label(self, label: str) -> list[AreaEntry]: """Get areas for label.""" diff --git a/homeassistant/helpers/check_config.py b/homeassistant/helpers/check_config.py index 06d836e8c20..43021fffac5 100644 --- a/homeassistant/helpers/check_config.py +++ b/homeassistant/helpers/check_config.py @@ -234,7 +234,7 @@ async def async_check_ha_config_file( # noqa: C901 config_schema = getattr(component, "CONFIG_SCHEMA", None) if config_schema is not None: try: - validated_config = config_schema(config) + validated_config = await cv.async_validate(hass, config_schema, config) # Don't fail if the validator removed the domain from the config if domain in validated_config: result[domain] = validated_config[domain] @@ -255,7 +255,9 @@ async def async_check_ha_config_file( # noqa: C901 for p_name, p_config in config_per_platform(config, domain): # Validate component specific platform schema try: - p_validated = component_platform_schema(p_config) + p_validated = await cv.async_validate( + hass, component_platform_schema, p_config + ) except vol.Invalid as ex: _comp_error(ex, domain, p_config, p_config) continue diff --git a/homeassistant/helpers/collection.py b/homeassistant/helpers/collection.py index 9151a9dfc6b..86d3450c3a0 100644 --- a/homeassistant/helpers/collection.py +++ b/homeassistant/helpers/collection.py @@ -7,6 +7,7 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine, Iterable from dataclasses import dataclass from functools import partial +from hashlib import md5 from itertools import groupby import logging from operator import attrgetter @@ -25,6 +26,7 @@ from homeassistant.util import slugify from . 
import entity_registry from .entity import Entity from .entity_component import EntityComponent +from .json import json_bytes from .storage import Store from .typing import ConfigType, VolDictType @@ -50,6 +52,7 @@ class CollectionChange: change_type: str item_id: str item: Any + item_hash: str | None = None type ChangeListener = Callable[ @@ -273,7 +276,9 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( await self.notify_changes( [ - CollectionChange(CHANGE_ADDED, item[CONF_ID], item) + CollectionChange( + CHANGE_ADDED, item[CONF_ID], item, self._hash_item(item) + ) for item in raw_storage["items"] ] ) @@ -313,7 +318,16 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( item = self._create_item(item_id, validated_data) self.data[item_id] = item self._async_schedule_save() - await self.notify_changes([CollectionChange(CHANGE_ADDED, item_id, item)]) + await self.notify_changes( + [ + CollectionChange( + CHANGE_ADDED, + item_id, + item, + self._hash_item(self._serialize_item(item_id, item)), + ) + ] + ) return item async def async_update_item(self, item_id: str, updates: dict) -> _ItemT: @@ -331,7 +345,16 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( self.data[item_id] = updated self._async_schedule_save() - await self.notify_changes([CollectionChange(CHANGE_UPDATED, item_id, updated)]) + await self.notify_changes( + [ + CollectionChange( + CHANGE_UPDATED, + item_id, + updated, + self._hash_item(self._serialize_item(item_id, updated)), + ) + ] + ) return self.data[item_id] @@ -365,6 +388,10 @@ class StorageCollection[_ItemT, _StoreT: SerializedStorageCollection]( def _data_to_save(self) -> _StoreT: """Return JSON-compatible date for storing to file.""" + def _hash_item(self, item: dict) -> str: + """Return a hash of the item.""" + return md5(json_bytes(item)).hexdigest() + class DictStorageCollection(StorageCollection[dict, SerializedStorageCollection]): """A specialized StorageCollection where the items are untyped dicts.""" @@ -464,6 +491,10 @@ class _CollectionLifeCycle(Generic[_EntityT]): async def _update_entity(self, change_set: CollectionChange) -> None: if entity := self.entities.get(change_set.item_id): + if change_set.item_hash: + self.ent_reg.async_update_entity_options( + entity.entity_id, "collection", {"hash": change_set.item_hash} + ) await entity.async_update_config(change_set.item) async def _collection_changed(self, change_set: Iterable[CollectionChange]) -> None: diff --git a/homeassistant/helpers/condition.py b/homeassistant/helpers/condition.py index 3438336dbfa..629cdeef942 100644 --- a/homeassistant/helpers/condition.py +++ b/homeassistant/helpers/condition.py @@ -60,7 +60,7 @@ import homeassistant.util.dt as dt_util from . 
import config_validation as cv, entity_registry as er from .sun import get_astral_event_date -from .template import Template, attach as template_attach, render_complex +from .template import Template, render_complex from .trace import ( TraceElement, trace_append_element, @@ -510,9 +510,6 @@ def async_numeric_state_from_config(config: ConfigType) -> ConditionCheckerType: hass: HomeAssistant, variables: TemplateVarsType = None ) -> bool: """Test numeric state condition.""" - if value_template is not None: - value_template.hass = hass - errors = [] for index, entity_id in enumerate(entity_ids): try: @@ -630,7 +627,6 @@ def state_from_config(config: ConfigType) -> ConditionCheckerType: @trace_condition_function def if_state(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Test if condition.""" - template_attach(hass, for_period) errors = [] result: bool = match != ENTITY_MATCH_ANY for index, entity_id in enumerate(entity_ids): @@ -792,8 +788,6 @@ def async_template_from_config(config: ConfigType) -> ConditionCheckerType: @trace_condition_function def template_if(hass: HomeAssistant, variables: TemplateVarsType = None) -> bool: """Validate template based if-condition.""" - value_template.hass = hass - return async_template(hass, value_template, variables) return template_if diff --git a/homeassistant/helpers/config_validation.py b/homeassistant/helpers/config_validation.py index cd6670dc597..d88c388f9c7 100644 --- a/homeassistant/helpers/config_validation.py +++ b/homeassistant/helpers/config_validation.py @@ -6,6 +6,7 @@ from collections.abc import Callable, Hashable import contextlib +from contextvars import ContextVar from datetime import ( date as date_sys, datetime as datetime_sys, @@ -13,6 +14,7 @@ from datetime import ( timedelta, ) from enum import Enum, StrEnum +import functools import logging from numbers import Number import os @@ -20,6 +22,7 @@ import re from socket import ( # type: ignore[attr-defined] # private, not in typeshed _GLOBAL_DEFAULT_TIMEOUT, ) +import threading from typing import Any, cast, overload from urllib.parse import urlparse from uuid import UUID @@ -94,6 +97,7 @@ from homeassistant.const import ( ) from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, + HomeAssistant, async_get_hass, async_get_hass_or_none, split_entity_id, @@ -114,6 +118,51 @@ from .typing import VolDictType, VolSchemaType TIME_PERIOD_ERROR = "offset {} should be format 'HH:MM', 'HH:MM:SS' or 'HH:MM:SS.F'" +class MustValidateInExecutor(HomeAssistantError): + """Raised when validation must happen in an executor thread.""" + + +class _Hass(threading.local): + """Container which makes a HomeAssistant instance available to validators.""" + + hass: HomeAssistant | None = None + + +_hass = _Hass() +"""Set when doing async friendly schema validation.""" + + +def _async_get_hass_or_none() -> HomeAssistant | None: + """Return the HomeAssistant instance or None. + + First tries core.async_get_hass_or_none, then _hass which is + set when doing async friendly schema validation. + """ + return async_get_hass_or_none() or _hass.hass + + +_validating_async: ContextVar[bool] = ContextVar("_validating_async", default=False) +"""Set to True when doing async friendly schema validation.""" + + +def not_async_friendly[**_P, _R](validator: Callable[_P, _R]) -> Callable[_P, _R]: + """Mark a validator as not async friendly. + + This makes validation happen in an executor thread if validation is done by + async_validate, otherwise does nothing. 
+ """ + + @functools.wraps(validator) + def _not_async_friendly(*args: _P.args, **kwargs: _P.kwargs) -> _R: + if _validating_async.get() and async_get_hass_or_none(): + # Raise if doing async friendly validation and validation + # is happening in the event loop + raise MustValidateInExecutor + return validator(*args, **kwargs) + + return _not_async_friendly + + class UrlProtocolSchema(StrEnum): """Valid URL protocol schema values.""" @@ -217,6 +266,7 @@ def whitespace(value: Any) -> str: raise vol.Invalid(f"contains non-whitespace: {value}") +@not_async_friendly def isdevice(value: Any) -> str: """Validate that value is a real device.""" try: @@ -258,6 +308,7 @@ def is_regex(value: Any) -> re.Pattern[Any]: return r +@not_async_friendly def isfile(value: Any) -> str: """Validate that the value is an existing file.""" if value is None: @@ -271,6 +322,7 @@ def isfile(value: Any) -> str: return file_in +@not_async_friendly def isdir(value: Any) -> str: """Validate that the value is an existing dir.""" if value is None: @@ -663,8 +715,19 @@ def template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value is None") if isinstance(value, (list, dict, template_helper.Template)): raise vol.Invalid("template value should be a string") + if not (hass := _async_get_hass_or_none()): + # pylint: disable-next=import-outside-toplevel + from .frame import report - template_value = template_helper.Template(str(value), async_get_hass_or_none()) + report( + ( + "validates schema outside the event loop, " + "which will stop working in HA Core 2025.10" + ), + error_if_core=False, + ) + + template_value = template_helper.Template(str(value), hass) try: template_value.ensure_valid() @@ -681,8 +744,19 @@ def dynamic_template(value: Any | None) -> template_helper.Template: raise vol.Invalid("template value should be a string") if not template_helper.is_template_string(str(value)): raise vol.Invalid("template value does not contain a dynamic template") + if not (hass := _async_get_hass_or_none()): + # pylint: disable-next=import-outside-toplevel + from .frame import report - template_value = template_helper.Template(str(value), async_get_hass_or_none()) + report( + ( + "validates schema outside the event loop, " + "which will stop working in HA Core 2025.10" + ), + error_if_core=False, + ) + + template_value = template_helper.Template(str(value), hass) try: template_value.ensure_valid() @@ -770,9 +844,9 @@ def socket_timeout(value: Any | None) -> object: float_value = float(value) if float_value > 0.0: return float_value - raise vol.Invalid("Invalid socket timeout value. float > 0.0 required.") except Exception as err: raise vol.Invalid(f"Invalid socket timeout: {err}") from err + raise vol.Invalid("Invalid socket timeout value. float > 0.0 required.") def url( @@ -1038,6 +1112,11 @@ def key_dependency[_KT: Hashable, _VT]( def custom_serializer(schema: Any) -> Any: + """Serialize additional types for voluptuous_serialize.""" + return _custom_serializer(schema, allow_section=True) + + +def _custom_serializer(schema: Any, *, allow_section: bool) -> Any: """Serialize additional types for voluptuous_serialize.""" from .. import data_entry_flow # pylint: disable=import-outside-toplevel from . 
import selector # pylint: disable=import-outside-toplevel @@ -1052,10 +1131,15 @@ def custom_serializer(schema: Any) -> Any: return {"type": "boolean"} if isinstance(schema, data_entry_flow.section): + if not allow_section: + raise ValueError("Nesting expandable sections is not supported") return { "type": "expandable", "schema": voluptuous_serialize.convert( - schema.schema, custom_serializer=custom_serializer + schema.schema, + custom_serializer=functools.partial( + _custom_serializer, allow_section=False + ), ), "expanded": not schema.options["collapsed"], } @@ -1253,37 +1337,56 @@ TARGET_SERVICE_FIELDS = { _HAS_ENTITY_SERVICE_FIELD = has_at_least_one_key(*ENTITY_SERVICE_FIELDS) -def _make_entity_service_schema(schema: dict, extra: int) -> vol.Schema: +def is_entity_service_schema(validator: VolSchemaType) -> bool: + """Check if the passed validator is an entity schema validator. + + The validator must be either of: + - A validator returned by cv._make_entity_service_schema + - A validator returned by cv._make_entity_service_schema, wrapped in a vol.Schema + - A validator returned by cv._make_entity_service_schema, wrapped in a vol.All + Nesting is allowed. + """ + if hasattr(validator, "_entity_service_schema"): + return True + if isinstance(validator, (vol.All)): + return any(is_entity_service_schema(val) for val in validator.validators) + if isinstance(validator, (vol.Schema)): + return is_entity_service_schema(validator.schema) + + return False + + +def _make_entity_service_schema(schema: dict, extra: int) -> VolSchemaType: """Create an entity service schema.""" - return vol.Schema( - vol.All( - vol.Schema( - { - # The frontend stores data here. Don't use in core. - vol.Remove("metadata"): dict, - **schema, - **ENTITY_SERVICE_FIELDS, - }, - extra=extra, - ), - _HAS_ENTITY_SERVICE_FIELD, - ) + validator = vol.All( + vol.Schema( + { + # The frontend stores data here. Don't use in core. + vol.Remove("metadata"): dict, + **schema, + **ENTITY_SERVICE_FIELDS, + }, + extra=extra, + ), + _HAS_ENTITY_SERVICE_FIELD, ) + setattr(validator, "_entity_service_schema", True) + return validator BASE_ENTITY_SCHEMA = _make_entity_service_schema({}, vol.PREVENT_EXTRA) def make_entity_service_schema( - schema: dict, *, extra: int = vol.PREVENT_EXTRA -) -> vol.Schema: + schema: dict | None, *, extra: int = vol.PREVENT_EXTRA +) -> VolSchemaType: """Create an entity service schema.""" if not schema and extra == vol.PREVENT_EXTRA: # If the schema is empty and we don't allow extra keys, we can return # the base schema and avoid compiling a new schema which is the case # for ~50% of services. return BASE_ENTITY_SCHEMA - return _make_entity_service_schema(schema, extra) + return _make_entity_service_schema(schema or {}, extra) SCRIPT_CONVERSATION_RESPONSE_SCHEMA = vol.Any(template, None) @@ -1920,3 +2023,32 @@ historic_currency = vol.In( country = vol.In(COUNTRIES, msg="invalid ISO 3166 formatted country") language = vol.In(LANGUAGES, msg="invalid RFC 5646 formatted language") + + +async def async_validate( + hass: HomeAssistant, validator: Callable[[Any], Any], value: Any +) -> Any: + """Async friendly schema validation. + + If a validator decorated with @not_async_friendly is called, validation will be + deferred to an executor. If not, validation will happen in the event loop. 
+ """ + _validating_async.set(True) + try: + return validator(value) + except MustValidateInExecutor: + return await hass.async_add_executor_job( + _validate_in_executor, hass, validator, value + ) + finally: + _validating_async.set(False) + + +def _validate_in_executor( + hass: HomeAssistant, validator: Callable[[Any], Any], value: Any +) -> Any: + _hass.hass = hass + try: + return validator(value) + finally: + _hass.hass = None diff --git a/homeassistant/helpers/entity_component.py b/homeassistant/helpers/entity_component.py index 0034eb1c6fc..76abb3020d1 100644 --- a/homeassistant/helpers/entity_component.py +++ b/homeassistant/helpers/entity_component.py @@ -5,7 +5,6 @@ from __future__ import annotations import asyncio from collections.abc import Callable, Iterable from datetime import timedelta -from functools import partial import logging from types import ModuleType from typing import Any, Generic @@ -258,31 +257,22 @@ class EntityComponent(Generic[_EntityT]): def async_register_entity_service( self, name: str, - schema: VolDictType | VolSchemaType, + schema: VolDictType | VolSchemaType | None, func: str | Callable[..., Any], required_features: list[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, ) -> None: """Register an entity service.""" - if isinstance(schema, dict): - schema = cv.make_entity_service_schema(schema) - - service_func: str | HassJob[..., Any] - service_func = func if isinstance(func, str) else HassJob(func) - - self.hass.services.async_register( + service.async_register_entity_service( + self.hass, self.domain, name, - partial( - service.entity_service_call, - self.hass, - self._entities, - service_func, - required_features=required_features, - ), - schema, - supports_response, + entities=self._entities, + func=func, job_type=HassJobType.Coroutinefunction, + required_features=required_features, + schema=schema, + supports_response=supports_response, ) async def async_setup_platform( diff --git a/homeassistant/helpers/entity_platform.py b/homeassistant/helpers/entity_platform.py index 6774780f00f..ce107d63b73 100644 --- a/homeassistant/helpers/entity_platform.py +++ b/homeassistant/helpers/entity_platform.py @@ -6,7 +6,6 @@ import asyncio from collections.abc import Awaitable, Callable, Coroutine, Iterable from contextvars import ContextVar from datetime import timedelta -from functools import partial from logging import Logger, getLogger from typing import TYPE_CHECKING, Any, Protocol @@ -20,7 +19,6 @@ from homeassistant.core import ( CALLBACK_TYPE, DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, - HassJob, HomeAssistant, ServiceCall, SupportsResponse, @@ -41,7 +39,6 @@ from homeassistant.util.async_ import create_eager_task from homeassistant.util.hass_dict import HassKey from . 
import ( - config_validation as cv, device_registry as dev_reg, entity_registry as ent_reg, service, @@ -985,7 +982,7 @@ class EntityPlatform: def async_register_entity_service( self, name: str, - schema: VolDictType | VolSchemaType, + schema: VolDictType | VolSchemaType | None, func: str | Callable[..., Any], required_features: Iterable[int] | None = None, supports_response: SupportsResponse = SupportsResponse.NONE, @@ -997,24 +994,16 @@ class EntityPlatform: if self.hass.services.has_service(self.platform_name, name): return - if isinstance(schema, dict): - schema = cv.make_entity_service_schema(schema) - - service_func: str | HassJob[..., Any] - service_func = func if isinstance(func, str) else HassJob(func) - - self.hass.services.async_register( + service.async_register_entity_service( + self.hass, self.platform_name, name, - partial( - service.entity_service_call, - self.hass, - self.domain_platform_entities, - service_func, - required_features=required_features, - ), - schema, - supports_response, + entities=self.domain_platform_entities, + func=func, + job_type=None, + required_features=required_features, + schema=schema, + supports_response=supports_response, ) async def _async_update_entity_states(self) -> None: diff --git a/homeassistant/helpers/event.py b/homeassistant/helpers/event.py index 207dd024b6a..97a85fdde89 100644 --- a/homeassistant/helpers/event.py +++ b/homeassistant/helpers/event.py @@ -976,13 +976,27 @@ class TrackTemplateResultInfo: self.hass = hass self._job = HassJob(action, f"track template result {track_templates}") - for track_template_ in track_templates: - track_template_.template.hass = hass self._track_templates = track_templates self._has_super_template = has_super_template self._last_result: dict[Template, bool | str | TemplateError] = {} + for track_template_ in track_templates: + if track_template_.template.hass: + continue + + # pylint: disable-next=import-outside-toplevel + from .frame import report + + report( + ( + "calls async_track_template_result with template without hass, " + "which will stop working in HA Core 2025.10" + ), + error_if_core=False, + ) + track_template_.template.hass = hass + self._rate_limit = KeyedRateLimit(hass) self._info: dict[Template, RenderInfo] = {} self._track_state_changes: _TrackStateChangeFiltered | None = None diff --git a/homeassistant/helpers/icon.py b/homeassistant/helpers/icon.py index e759719f667..ce8205eb915 100644 --- a/homeassistant/helpers/icon.py +++ b/homeassistant/helpers/icon.py @@ -7,7 +7,7 @@ from collections.abc import Iterable from functools import lru_cache import logging import pathlib -from typing import Any +from typing import Any, cast from homeassistant.core import HomeAssistant, callback from homeassistant.loader import Integration, async_get_integrations @@ -21,12 +21,34 @@ ICON_CACHE: HassKey[_IconsCache] = HassKey("icon_cache") _LOGGER = logging.getLogger(__name__) +def convert_shorthand_service_icon( + value: str | dict[str, str | dict[str, str]], +) -> dict[str, str | dict[str, str]]: + """Convert shorthand service icon to dict.""" + if isinstance(value, str): + return {"service": value} + return value + + +def _load_icons_file( + icons_file: pathlib.Path, +) -> dict[str, Any]: + """Load and parse an icons.json file.""" + icons = load_json_object(icons_file) + if "services" not in icons: + return icons + services = cast(dict[str, str | dict[str, str | dict[str, str]]], icons["services"]) + for service, service_icons in services.items(): + services[service] = 
convert_shorthand_service_icon(service_icons) + return icons + + def _load_icons_files( icons_files: dict[str, pathlib.Path], ) -> dict[str, dict[str, Any]]: """Load and parse icons.json files.""" return { - component: load_json_object(icons_file) + component: _load_icons_file(icons_file) for component, icons_file in icons_files.items() } diff --git a/homeassistant/helpers/script.py b/homeassistant/helpers/script.py index a1b885d0c52..26a9b6e069e 100644 --- a/homeassistant/helpers/script.py +++ b/homeassistant/helpers/script.py @@ -669,7 +669,6 @@ class _ScriptRun: trace_set_result(wait=self._variables["wait"]) wait_template = self._action[CONF_WAIT_TEMPLATE] - wait_template.hass = self._hass # check if condition already okay if condition.async_template(self._hass, wait_template, self._variables, False): @@ -1429,7 +1428,6 @@ class Script: self._hass = hass self.sequence = sequence - template.attach(hass, self.sequence) self.name = name self.unique_id = f"{domain}.{name}-{id(self)}" self.domain = domain @@ -1459,8 +1457,6 @@ class Script: self._sequence_scripts: dict[int, Script] = {} self.variables = variables self._variables_dynamic = template.is_complex(variables) - if self._variables_dynamic: - template.attach(hass, variables) self._copy_variables_on_run = copy_variables @property diff --git a/homeassistant/helpers/script_variables.py b/homeassistant/helpers/script_variables.py index 043101b9b86..2b4507abd64 100644 --- a/homeassistant/helpers/script_variables.py +++ b/homeassistant/helpers/script_variables.py @@ -36,7 +36,6 @@ class ScriptVariables: """ if self._has_template is None: self._has_template = template.is_complex(self.variables) - template.attach(hass, self.variables) if not self._has_template: if render_as_defaults: diff --git a/homeassistant/helpers/service.py b/homeassistant/helpers/service.py index 58cd4657301..bb9490b9edd 100644 --- a/homeassistant/helpers/service.py +++ b/homeassistant/helpers/service.py @@ -33,6 +33,7 @@ from homeassistant.core import ( Context, EntityServiceResponse, HassJob, + HassJobType, HomeAssistant, ServiceCall, ServiceResponse, @@ -63,7 +64,7 @@ from . 
import ( ) from .group import expand_entity_ids from .selector import TargetSelector -from .typing import ConfigType, TemplateVarsType, VolSchemaType +from .typing import ConfigType, TemplateVarsType, VolDictType, VolSchemaType if TYPE_CHECKING: from .entity import Entity @@ -365,7 +366,6 @@ def async_prepare_call_from_config( if isinstance(domain_service, template.Template): try: - domain_service.hass = hass domain_service = domain_service.async_render(variables) domain_service = cv.service(domain_service) except TemplateError as ex: @@ -384,10 +384,8 @@ def async_prepare_call_from_config( conf = config[CONF_TARGET] try: if isinstance(conf, template.Template): - conf.hass = hass target.update(conf.async_render(variables)) else: - template.attach(hass, conf) target.update(template.render_complex(conf, variables)) if CONF_ENTITY_ID in target: @@ -413,7 +411,6 @@ def async_prepare_call_from_config( if conf not in config: continue try: - template.attach(hass, config[conf]) render = template.render_complex(config[conf], variables) if not isinstance(render, dict): raise HomeAssistantError( @@ -1244,3 +1241,58 @@ class ReloadServiceHelper[_T]: self._service_running = False self._pending_reload_targets -= reload_targets self._service_condition.notify_all() + + +@callback +def async_register_entity_service( + hass: HomeAssistant, + domain: str, + name: str, + *, + entities: dict[str, Entity], + func: str | Callable[..., Any], + job_type: HassJobType | None, + required_features: Iterable[int] | None = None, + schema: VolDictType | VolSchemaType | None, + supports_response: SupportsResponse = SupportsResponse.NONE, +) -> None: + """Help registering an entity service. + + This is called by EntityComponent.async_register_entity_service and + EntityPlatform.async_register_entity_service and should not be called + directly by integrations. + """ + if schema is None or isinstance(schema, dict): + schema = cv.make_entity_service_schema(schema) + # Do a sanity check to check this is a valid entity service schema, + # the check could be extended to require All/Any to have sub schema(s) + # with all entity service fields + elif not cv.is_entity_service_schema(schema): + # pylint: disable-next=import-outside-toplevel + from .frame import report + + report( + ( + "registers an entity service with a non entity service schema " + "which will stop working in HA Core 2025.9" + ), + error_if_core=False, + ) + + service_func: str | HassJob[..., Any] + service_func = func if isinstance(func, str) else HassJob(func) + + hass.services.async_register( + domain, + name, + partial( + entity_service_call, + hass, + entities, + service_func, + required_features=required_features, + ), + schema, + supports_response, + job_type=job_type, + ) diff --git a/homeassistant/helpers/template.py b/homeassistant/helpers/template.py index 7742418c5a7..12a005cc7d6 100644 --- a/homeassistant/helpers/template.py +++ b/homeassistant/helpers/template.py @@ -495,10 +495,26 @@ class Template: ) def __init__(self, template: str, hass: HomeAssistant | None = None) -> None: - """Instantiate a template.""" + """Instantiate a template. + + Note: A valid hass instance should always be passed in. The hass parameter + will be non optional in Home Assistant Core 2025.10. 
+ """ + # pylint: disable-next=import-outside-toplevel + from .frame import report + if not isinstance(template, str): raise TypeError("Expected template to be a string") + if not hass: + report( + ( + "creates a template object without passing hass, " + "which will stop working in HA Core 2025.10" + ), + error_if_core=False, + ) + self.template: str = template.strip() self._compiled_code: CodeType | None = None self._compiled: jinja2.Template | None = None @@ -2844,6 +2860,7 @@ class TemplateEnvironment(ImmutableSandboxedEnvironment): self.globals["iif"] = iif self.globals["bool"] = forgiving_boolean self.globals["version"] = version + self.globals["zip"] = zip self.tests["is_number"] = is_number self.tests["list"] = _is_list self.tests["set"] = _is_set diff --git a/homeassistant/helpers/trigger_template_entity.py b/homeassistant/helpers/trigger_template_entity.py index 7b1c4ab8078..7f8ad41d7bb 100644 --- a/homeassistant/helpers/trigger_template_entity.py +++ b/homeassistant/helpers/trigger_template_entity.py @@ -30,7 +30,7 @@ from homeassistant.util.json import JSON_DECODE_EXCEPTIONS, json_loads from . import config_validation as cv from .entity import Entity -from .template import attach as template_attach, render_complex +from .template import render_complex from .typing import ConfigType CONF_AVAILABILITY = "availability" @@ -157,11 +157,6 @@ class TriggerBaseEntity(Entity): """Return extra attributes.""" return self._rendered.get(CONF_ATTRIBUTES) - async def async_added_to_hass(self) -> None: - """Handle being added to Home Assistant.""" - await super().async_added_to_hass() - template_attach(self.hass, self._config) - def _set_unique_id(self, unique_id: str | None) -> None: """Set unique id.""" self._unique_id = unique_id diff --git a/homeassistant/package_constraints.txt b/homeassistant/package_constraints.txt index 432e213d267..fd878c1ffcf 100644 --- a/homeassistant/package_constraints.txt +++ b/homeassistant/package_constraints.txt @@ -8,7 +8,7 @@ aiohttp==3.10.5 aiohttp_cors==0.7.0 aiozoneinfo==0.2.1 astral==2.2 -async-interrupt==1.1.2 +async-interrupt==1.2.0 async-upnp-client==0.40.0 atomicwrites-homeassistant==1.4.1 attrs==23.2.0 @@ -18,51 +18,52 @@ bleak-retry-connector==3.5.0 bleak==0.22.2 bluetooth-adapters==0.19.4 bluetooth-auto-recovery==1.4.2 -bluetooth-data-tools==1.19.4 -cached_ipaddress==0.3.0 +bluetooth-data-tools==1.20.0 +cached-ipaddress==0.5.0 certifi>=2021.5.30 ciso8601==2.3.1 cryptography==43.0.0 -dbus-fast==2.22.1 -fnv-hash-fast==0.5.0 +dbus-fast==2.24.0 +fnv-hash-fast==1.0.2 ha-av==10.1.1 ha-ffmpeg==3.2.0 -habluetooth==3.1.3 +habluetooth==3.4.0 hass-nabucasa==0.81.1 hassil==1.7.4 home-assistant-bluetooth==1.12.2 -home-assistant-frontend==20240809.0 -home-assistant-intents==2024.8.7 +home-assistant-frontend==20240904.0 +home-assistant-intents==2024.9.4 httpx==0.27.0 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 mutagen==1.47.0 -orjson==3.10.6 +orjson==3.10.7 packaging>=23.1 paho-mqtt==1.6.1 Pillow==10.4.0 pip>=21.3.1 psutil-home-assistant==0.0.1 -PyJWT==2.8.0 +PyJWT==2.9.0 pymicro-vad==1.0.1 PyNaCl==1.5.0 pyOpenSSL==24.2.1 pyserial==3.5 +pyspeex-noise==1.0.2 python-slugify==8.0.4 -PyTurboJPEG==1.7.1 +PyTurboJPEG==1.7.5 pyudev==0.24.1 -PyYAML==6.0.1 +PyYAML==6.0.2 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.13.1 +ulid-transform==1.0.2 urllib3>=1.26.5,<2 voluptuous-openapi==0.0.5 voluptuous-serialize==2.6.0 voluptuous==0.15.2 -yarl==1.9.4 -zeroconf==0.132.2 +yarl==1.9.7 +zeroconf==0.133.0 # Constrain pycryptodome to avoid 
vulnerability # see https://github.com/home-assistant/core/pull/16238 @@ -79,11 +80,6 @@ grpcio==1.59.0 grpcio-status==1.59.0 grpcio-reflection==1.59.0 -# libcst >=0.4.0 requires a newer Rust than we currently have available, -# thus our wheels builds fail. This pins it to the last working version, -# which at this point satisfies our needs. -libcst==0.3.23 - # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -98,11 +94,6 @@ enum34==1000000000.0.0 typing==1000000000.0.0 uuid==1000000000.0.0 -# regex causes segfault with version 2021.8.27 -# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error -# This is fixed in 2021.8.28 -regex==2021.8.28 - # httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these @@ -118,12 +109,6 @@ hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env numpy==1.26.0 -# Prevent dependency conflicts between sisyphus-control and aioambient -# until upper bounds for sisyphus-control have been updated -# https://github.com/jkeljo/sisyphus-control/issues/6 -python-engineio>=3.13.1,<4.0 -python-socketio>=4.6.0,<5.0 - # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 multidict>=6.0.2 @@ -152,7 +137,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.1 +protobuf==4.25.4 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder @@ -166,9 +151,6 @@ websockets>=11.0.1 # pysnmplib is no longer maintained and does not work with newer # python pysnmplib==1000000000.0.0 -# pysnmp is no longer maintained and does not work with newer -# python -pysnmp==1000000000.0.0 # The get-mac package has been replaced with getmac. Installing get-mac alongside getmac # breaks getmac due to them both sharing the same python package name inside 'getmac'. 
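
Note on the homeassistant/helpers/config_validation.py changes above: they introduce cv.async_validate together with the @not_async_friendly marker. Validators that touch the filesystem (isfile, isdir, isdevice) raise MustValidateInExecutor when called during async-friendly validation, and async_validate then re-runs the whole schema in an executor thread where the thread-local _hass is set so cv.template can still resolve the instance. A minimal usage sketch follows; CONFIG_SCHEMA and async_validate_config are illustrative names, not part of this diff.

import voluptuous as vol

from homeassistant.core import HomeAssistant
from homeassistant.helpers import config_validation as cv

# Hypothetical schema mixing an async-friendly validator (cv.template) with a
# blocking one (cv.isfile, which this diff marks @not_async_friendly).
CONFIG_SCHEMA = vol.Schema({"certificate": cv.isfile, "value": cv.template})


async def async_validate_config(hass: HomeAssistant, config: dict) -> dict:
    # Safe to call from the event loop: cv.isfile raises MustValidateInExecutor,
    # and cv.async_validate retries the schema in an executor thread where the
    # hass instance is made available to cv.template via the thread-local _hass.
    return await cv.async_validate(hass, CONFIG_SCHEMA, config)
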
diff --git a/homeassistant/runner.py b/homeassistant/runner.py index a1510336302..4bac12ec399 100644 --- a/homeassistant/runner.py +++ b/homeassistant/runner.py @@ -107,7 +107,6 @@ class HassEventLoopPolicy(asyncio.DefaultEventLoopPolicy): def new_event_loop(self) -> asyncio.AbstractEventLoop: """Get the event loop.""" loop: asyncio.AbstractEventLoop = super().new_event_loop() - setattr(loop, "_thread_ident", threading.get_ident()) loop.set_exception_handler(_async_loop_exception_handler) if self.debug: loop.set_debug(True) diff --git a/homeassistant/scripts/auth.py b/homeassistant/scripts/auth.py index fff57c7adfe..b034021e6e7 100644 --- a/homeassistant/scripts/auth.py +++ b/homeassistant/scripts/auth.py @@ -2,8 +2,10 @@ import argparse import asyncio +from collections.abc import Sequence import logging import os +from typing import TYPE_CHECKING from homeassistant import runner from homeassistant.auth import auth_manager_from_config @@ -15,7 +17,7 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er # mypy: allow-untyped-calls, allow-untyped-defs -def run(args): +def run(args: Sequence[str] | None) -> None: """Handle Home Assistant auth provider script.""" parser = argparse.ArgumentParser(description="Manage Home Assistant users") parser.add_argument("--script", choices=["auth"]) @@ -50,7 +52,7 @@ def run(args): asyncio.run(run_command(parser.parse_args(args))) -async def run_command(args): +async def run_command(args: argparse.Namespace) -> None: """Run the command.""" hass = HomeAssistant(os.path.join(os.getcwd(), args.config)) await asyncio.gather(dr.async_load(hass), er.async_load(hass)) @@ -65,9 +67,13 @@ async def run_command(args): await hass.async_stop() -async def list_users(hass, provider, args): +async def list_users( + hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace +) -> None: """List the users.""" count = 0 + if TYPE_CHECKING: + assert provider.data for user in provider.data.users: count += 1 print(user["username"]) @@ -76,8 +82,12 @@ async def list_users(hass, provider, args): print("Total users:", count) -async def add_user(hass, provider, args): +async def add_user( + hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace +) -> None: """Create a user.""" + if TYPE_CHECKING: + assert provider.data try: provider.data.add_auth(args.username, args.password) except hass_auth.InvalidUser: @@ -89,8 +99,12 @@ async def add_user(hass, provider, args): print("Auth created") -async def validate_login(hass, provider, args): +async def validate_login( + hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace +) -> None: """Validate a login.""" + if TYPE_CHECKING: + assert provider.data try: provider.data.validate_login(args.username, args.password) print("Auth valid") @@ -98,8 +112,12 @@ async def validate_login(hass, provider, args): print("Auth invalid") -async def change_password(hass, provider, args): +async def change_password( + hass: HomeAssistant, provider: hass_auth.HassAuthProvider, args: argparse.Namespace +) -> None: """Change password.""" + if TYPE_CHECKING: + assert provider.data try: provider.data.change_password(args.username, args.new_password) await provider.data.async_save() diff --git a/homeassistant/scripts/benchmark/__init__.py b/homeassistant/scripts/benchmark/__init__.py index 34bc536502f..b769d385a4f 100644 --- a/homeassistant/scripts/benchmark/__init__.py +++ b/homeassistant/scripts/benchmark/__init__.py @@ -4,10 +4,8 @@ from __future__ 
import annotations import argparse import asyncio -import collections from collections.abc import Callable from contextlib import suppress -import json import logging from timeit import default_timer as timer @@ -18,7 +16,7 @@ from homeassistant.helpers.event import ( async_track_state_change, async_track_state_change_event, ) -from homeassistant.helpers.json import JSON_DUMP, JSONEncoder +from homeassistant.helpers.json import JSON_DUMP # mypy: allow-untyped-calls, allow-untyped-defs, no-check-untyped-defs # mypy: no-warn-return-any @@ -310,48 +308,3 @@ async def json_serialize_states(hass): start = timer() JSON_DUMP(states) return timer() - start - - -def _create_state_changed_event_from_old_new( - entity_id, event_time_fired, old_state, new_state -): - """Create a state changed event from a old and new state.""" - attributes = {} - if new_state is not None: - attributes = new_state.get("attributes") - attributes_json = json.dumps(attributes, cls=JSONEncoder) - if attributes_json == "null": - attributes_json = "{}" - row = collections.namedtuple( - "Row", - [ - "event_type" - "event_data" - "time_fired" - "context_id" - "context_user_id" - "state" - "entity_id" - "domain" - "attributes" - "state_id", - "old_state_id", - ], - ) - - row.event_type = EVENT_STATE_CHANGED - row.event_data = "{}" - row.attributes = attributes_json - row.time_fired = event_time_fired - row.state = new_state and new_state.get("state") - row.entity_id = entity_id - row.domain = entity_id and core.split_entity_id(entity_id)[0] - row.context_id = None - row.context_user_id = None - row.old_state_id = old_state and 1 - row.state_id = new_state and 1 - - # pylint: disable-next=import-outside-toplevel - from homeassistant.components import logbook - - return logbook.LazyEventPartialState(row, {}) diff --git a/homeassistant/scripts/macos/__init__.py b/homeassistant/scripts/macos/__init__.py index f629492ec39..0bf88da81dc 100644 --- a/homeassistant/scripts/macos/__init__.py +++ b/homeassistant/scripts/macos/__init__.py @@ -44,7 +44,7 @@ def uninstall_osx(): print("Home Assistant has been uninstalled.") -def run(args): +def run(args: list[str]) -> int: """Handle OSX commandline script.""" commands = "install", "uninstall", "restart" if not args or args[0] not in commands: @@ -63,3 +63,5 @@ def run(args): time.sleep(0.5) install_osx() return 0 + + raise ValueError(f"Invalid command {args[0]}") diff --git a/homeassistant/util/async_.py b/homeassistant/util/async_.py index f2dc1291324..d010d8cb341 100644 --- a/homeassistant/util/async_.py +++ b/homeassistant/util/async_.py @@ -2,7 +2,15 @@ from __future__ import annotations -from asyncio import AbstractEventLoop, Future, Semaphore, Task, gather, get_running_loop +from asyncio import ( + AbstractEventLoop, + Future, + Semaphore, + Task, + TimerHandle, + gather, + get_running_loop, +) from collections.abc import Awaitable, Callable, Coroutine import concurrent.futures import logging @@ -49,7 +57,7 @@ def run_callback_threadsafe[_T, *_Ts]( Return a concurrent.futures.Future to access the result. """ - if (ident := loop.__dict__.get("_thread_ident")) and ident == threading.get_ident(): + if (ident := loop.__dict__.get("_thread_id")) and ident == threading.get_ident(): raise RuntimeError("Cannot be called from within the event loop") future: concurrent.futures.Future[_T] = concurrent.futures.Future() @@ -124,3 +132,9 @@ def shutdown_run_callback_threadsafe(loop: AbstractEventLoop) -> None: python is going to exit. 
""" setattr(loop, _SHUTDOWN_RUN_CALLBACK_THREADSAFE, True) + + +def get_scheduled_timer_handles(loop: AbstractEventLoop) -> list[TimerHandle]: + """Return a list of scheduled TimerHandles.""" + handles: list[TimerHandle] = loop._scheduled # type: ignore[attr-defined] # noqa: SLF001 + return handles diff --git a/homeassistant/util/executor.py b/homeassistant/util/executor.py index 47b6d08a197..5f0fdd5c273 100644 --- a/homeassistant/util/executor.py +++ b/homeassistant/util/executor.py @@ -63,10 +63,18 @@ def join_or_interrupt_threads( class InterruptibleThreadPoolExecutor(ThreadPoolExecutor): """A ThreadPoolExecutor instance that will not deadlock on shutdown.""" - def shutdown(self, *args: Any, **kwargs: Any) -> None: - """Shutdown with interrupt support added.""" + def shutdown( + self, *args: Any, join_threads_or_timeout: bool = True, **kwargs: Any + ) -> None: + """Shutdown with interrupt support added. + + By default shutdown will wait for threads to finish up + to the timeout before forcefully stopping them. This can + be disabled by setting `join_threads_or_timeout` to False. + """ super().shutdown(wait=False, cancel_futures=True) - self.join_threads_or_timeout() + if join_threads_or_timeout: + self.join_threads_or_timeout() def join_threads_or_timeout(self) -> None: """Join threads or timeout.""" diff --git a/homeassistant/util/language.py b/homeassistant/util/language.py index 8644f8014b6..8a82de9065f 100644 --- a/homeassistant/util/language.py +++ b/homeassistant/util/language.py @@ -137,9 +137,6 @@ class Dialect: region_idx = pref_regions.index(self.region) elif dialect.region is not None: region_idx = pref_regions.index(dialect.region) - else: - # Can't happen, but mypy is not smart enough - raise ValueError # More preferred regions are at the front. # Add 1 to boost above a weak match where no regions are set. 
diff --git a/homeassistant/util/unit_conversion.py b/homeassistant/util/unit_conversion.py index 2b9f73afab7..d5586704fc5 100644 --- a/homeassistant/util/unit_conversion.py +++ b/homeassistant/util/unit_conversion.py @@ -68,7 +68,6 @@ class BaseUnitConverter: """Define the format of a conversion utility.""" UNIT_CLASS: str - NORMALIZED_UNIT: str | None VALID_UNITS: set[str | None] _UNIT_CONVERSION: dict[str | None, float] @@ -125,7 +124,6 @@ class DataRateConverter(BaseUnitConverter): """Utility to convert data rate values.""" UNIT_CLASS = "data_rate" - NORMALIZED_UNIT = UnitOfDataRate.BITS_PER_SECOND # Units in terms of bits _UNIT_CONVERSION: dict[str | None, float] = { UnitOfDataRate.BITS_PER_SECOND: 1, @@ -147,7 +145,6 @@ class DistanceConverter(BaseUnitConverter): """Utility to convert distance values.""" UNIT_CLASS = "distance" - NORMALIZED_UNIT = UnitOfLength.METERS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfLength.METERS: 1, UnitOfLength.MILLIMETERS: 1 / _MM_TO_M, @@ -174,7 +171,6 @@ class ConductivityConverter(BaseUnitConverter): """Utility to convert electric current values.""" UNIT_CLASS = "conductivity" - NORMALIZED_UNIT = UnitOfConductivity.MICROSIEMENS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfConductivity.MICROSIEMENS: 1, UnitOfConductivity.MILLISIEMENS: 1e-3, @@ -187,7 +183,6 @@ class ElectricCurrentConverter(BaseUnitConverter): """Utility to convert electric current values.""" UNIT_CLASS = "electric_current" - NORMALIZED_UNIT = UnitOfElectricCurrent.AMPERE _UNIT_CONVERSION: dict[str | None, float] = { UnitOfElectricCurrent.AMPERE: 1, UnitOfElectricCurrent.MILLIAMPERE: 1e3, @@ -199,7 +194,6 @@ class ElectricPotentialConverter(BaseUnitConverter): """Utility to convert electric potential values.""" UNIT_CLASS = "voltage" - NORMALIZED_UNIT = UnitOfElectricPotential.VOLT _UNIT_CONVERSION: dict[str | None, float] = { UnitOfElectricPotential.VOLT: 1, UnitOfElectricPotential.MILLIVOLT: 1e3, @@ -214,7 +208,6 @@ class EnergyConverter(BaseUnitConverter): """Utility to convert energy values.""" UNIT_CLASS = "energy" - NORMALIZED_UNIT = UnitOfEnergy.KILO_WATT_HOUR _UNIT_CONVERSION: dict[str | None, float] = { UnitOfEnergy.WATT_HOUR: 1 * 1000, UnitOfEnergy.KILO_WATT_HOUR: 1, @@ -235,7 +228,6 @@ class InformationConverter(BaseUnitConverter): """Utility to convert information values.""" UNIT_CLASS = "information" - NORMALIZED_UNIT = UnitOfInformation.BITS # Units in terms of bits _UNIT_CONVERSION: dict[str | None, float] = { UnitOfInformation.BITS: 1, @@ -267,7 +259,6 @@ class MassConverter(BaseUnitConverter): """Utility to convert mass values.""" UNIT_CLASS = "mass" - NORMALIZED_UNIT = UnitOfMass.GRAMS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfMass.MICROGRAMS: 1 * 1000 * 1000, UnitOfMass.MILLIGRAMS: 1 * 1000, @@ -292,7 +283,6 @@ class PowerConverter(BaseUnitConverter): """Utility to convert power values.""" UNIT_CLASS = "power" - NORMALIZED_UNIT = UnitOfPower.WATT _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPower.WATT: 1, UnitOfPower.KILO_WATT: 1 / 1000, @@ -307,7 +297,6 @@ class PressureConverter(BaseUnitConverter): """Utility to convert pressure values.""" UNIT_CLASS = "pressure" - NORMALIZED_UNIT = UnitOfPressure.PA _UNIT_CONVERSION: dict[str | None, float] = { UnitOfPressure.PA: 1, UnitOfPressure.HPA: 1 / 100, @@ -338,7 +327,6 @@ class SpeedConverter(BaseUnitConverter): """Utility to convert speed values.""" UNIT_CLASS = "speed" - NORMALIZED_UNIT = UnitOfSpeed.METERS_PER_SECOND _UNIT_CONVERSION: dict[str | None, float] = { 
UnitOfVolumetricFlux.INCHES_PER_DAY: _DAYS_TO_SECS / _IN_TO_M, UnitOfVolumetricFlux.INCHES_PER_HOUR: _HRS_TO_SECS / _IN_TO_M, @@ -433,7 +421,6 @@ class TemperatureConverter(BaseUnitConverter): """Utility to convert temperature values.""" UNIT_CLASS = "temperature" - NORMALIZED_UNIT = UnitOfTemperature.CELSIUS VALID_UNITS = { UnitOfTemperature.CELSIUS, UnitOfTemperature.FAHRENHEIT, @@ -564,7 +551,6 @@ class UnitlessRatioConverter(BaseUnitConverter): """Utility to convert unitless ratios.""" UNIT_CLASS = "unitless" - NORMALIZED_UNIT = None _UNIT_CONVERSION: dict[str | None, float] = { None: 1, CONCENTRATION_PARTS_PER_BILLION: 1000000000, @@ -581,7 +567,6 @@ class VolumeConverter(BaseUnitConverter): """Utility to convert volume values.""" UNIT_CLASS = "volume" - NORMALIZED_UNIT = UnitOfVolume.CUBIC_METERS # Units in terms of m³ _UNIT_CONVERSION: dict[str | None, float] = { UnitOfVolume.LITERS: 1 / _L_TO_CUBIC_METER, @@ -607,7 +592,6 @@ class VolumeFlowRateConverter(BaseUnitConverter): """Utility to convert volume values.""" UNIT_CLASS = "volume_flow_rate" - NORMALIZED_UNIT = UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR # Units in terms of m³/h _UNIT_CONVERSION: dict[str | None, float] = { UnitOfVolumeFlowRate.CUBIC_METERS_PER_HOUR: 1, @@ -630,7 +614,6 @@ class DurationConverter(BaseUnitConverter): """Utility to convert duration values.""" UNIT_CLASS = "duration" - NORMALIZED_UNIT = UnitOfTime.SECONDS _UNIT_CONVERSION: dict[str | None, float] = { UnitOfTime.MICROSECONDS: 1000000, UnitOfTime.MILLISECONDS: 1000, diff --git a/homeassistant/util/yaml/loader.py b/homeassistant/util/yaml/loader.py index ff9b7cb3601..a56cf126f79 100644 --- a/homeassistant/util/yaml/loader.py +++ b/homeassistant/util/yaml/loader.py @@ -221,13 +221,21 @@ type LoaderType = FastSafeLoader | PythonSafeLoader def load_yaml( fname: str | os.PathLike[str], secrets: Secrets | None = None ) -> JSON_TYPE | None: - """Load a YAML file.""" + """Load a YAML file. + + If opening the file raises an OSError it will be wrapped in a HomeAssistantError, + except for FileNotFoundError which will be re-raised. + """ try: with open(fname, encoding="utf-8") as conf_file: return parse_yaml(conf_file, secrets) except UnicodeDecodeError as exc: _LOGGER.error("Unable to read file %s: %s", fname, exc) raise HomeAssistantError(exc) from exc + except FileNotFoundError: + raise + except OSError as exc: + raise HomeAssistantError(exc) from exc def load_yaml_dict( @@ -348,6 +356,20 @@ def _add_reference_to_node_class( return obj +def _raise_if_no_value[NodeT: yaml.nodes.Node, _R]( + func: Callable[[LoaderType, NodeT], _R], +) -> Callable[[LoaderType, NodeT], _R]: + def wrapper(loader: LoaderType, node: NodeT) -> _R: + if not node.value: + raise HomeAssistantError( + f"{node.start_mark}: {node.tag} needs an argument." + ) + return func(loader, node) + + return wrapper + + +@_raise_if_no_value def _include_yaml(loader: LoaderType, node: yaml.nodes.Node) -> JSON_TYPE: """Load another YAML file and embed it using the !include tag. 
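
Note on the homeassistant/util/yaml/loader.py hunk above: load_yaml now re-raises FileNotFoundError unchanged while wrapping other OSErrors (for example permission problems) in HomeAssistantError, so callers can distinguish a missing file from an unreadable one. A hedged sketch of a caller relying on that distinction; read_optional_yaml is an illustrative helper, not part of this diff.

from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.yaml import load_yaml


def read_optional_yaml(path: str):
    # Missing file: treat as "no config"; unreadable file: surface the error.
    try:
        return load_yaml(path)
    except FileNotFoundError:
        return None
    except HomeAssistantError as err:
        raise RuntimeError(f"Cannot read {path}: {err}") from err
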
@@ -382,6 +404,7 @@ def _find_files(directory: str, pattern: str) -> Iterator[str]: yield filename +@_raise_if_no_value def _include_dir_named_yaml(loader: LoaderType, node: yaml.nodes.Node) -> NodeDictClass: """Load multiple files from directory as a dictionary.""" mapping = NodeDictClass() @@ -399,6 +422,7 @@ def _include_dir_named_yaml(loader: LoaderType, node: yaml.nodes.Node) -> NodeDi return _add_reference_to_node_class(mapping, loader, node) +@_raise_if_no_value def _include_dir_merge_named_yaml( loader: LoaderType, node: yaml.nodes.Node ) -> NodeDictClass: @@ -414,6 +438,7 @@ def _include_dir_merge_named_yaml( return _add_reference_to_node_class(mapping, loader, node) +@_raise_if_no_value def _include_dir_list_yaml( loader: LoaderType, node: yaml.nodes.Node ) -> list[JSON_TYPE]: @@ -427,6 +452,7 @@ def _include_dir_list_yaml( ] +@_raise_if_no_value def _include_dir_merge_list_yaml( loader: LoaderType, node: yaml.nodes.Node ) -> JSON_TYPE: diff --git a/mypy.ini b/mypy.ini index 0f4f8907612..a312a77122f 100644 --- a/mypy.ini +++ b/mypy.ini @@ -705,26 +705,6 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.asterisk_cdr.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - -[mypy-homeassistant.components.asterisk_mbox.*] -check_untyped_defs = true -disallow_incomplete_defs = true -disallow_subclassing_any = true -disallow_untyped_calls = true -disallow_untyped_decorators = true -disallow_untyped_defs = true -warn_return_any = true -warn_unreachable = true - [mypy-homeassistant.components.asuswrt.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1736,6 +1716,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.fujitsu_fglair.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.fully_kiosk.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -1746,6 +1736,16 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true +[mypy-homeassistant.components.fyta.*] +check_untyped_defs = true +disallow_incomplete_defs = true +disallow_subclassing_any = true +disallow_untyped_calls = true +disallow_untyped_decorators = true +disallow_untyped_defs = true +warn_return_any = true +warn_unreachable = true + [mypy-homeassistant.components.generic_hygrostat.*] check_untyped_defs = true disallow_incomplete_defs = true @@ -2696,7 +2696,7 @@ disallow_untyped_defs = true warn_return_any = true warn_unreachable = true -[mypy-homeassistant.components.mailbox.*] +[mypy-homeassistant.components.manual.*] check_untyped_defs = true disallow_incomplete_defs = true disallow_subclassing_any = true diff --git a/pylint/plugins/hass_enforce_type_hints.py b/pylint/plugins/hass_enforce_type_hints.py index 2c58e7aae15..13499134668 100644 --- a/pylint/plugins/hass_enforce_type_hints.py +++ b/pylint/plugins/hass_enforce_type_hints.py @@ -107,6 +107,7 @@ _TEST_FIXTURES: dict[str, list[str] | str] = { "device_registry": "DeviceRegistry", "enable_bluetooth": "None", "enable_custom_integrations": "None", + "enable_missing_statistics": "bool", 
"enable_nightly_purge": "bool", "enable_statistics": "bool", "enable_schema_validation": "bool", @@ -1760,39 +1761,6 @@ _INHERITANCE_MATCH: dict[str, list[ClassTypeHintMatch]] = { ], ), ], - "mailbox": [ - ClassTypeHintMatch( - base_class="Mailbox", - matches=[ - TypeHintMatch( - function_name="media_type", - return_type="str", - ), - TypeHintMatch( - function_name="can_delete", - return_type="bool", - ), - TypeHintMatch( - function_name="has_media", - return_type="bool", - ), - TypeHintMatch( - function_name="async_get_media", - arg_types={1: "str"}, - return_type="bytes", - ), - TypeHintMatch( - function_name="async_get_messages", - return_type="list[dict[str, Any]]", - ), - TypeHintMatch( - function_name="async_delete", - arg_types={1: "str"}, - return_type="bool", - ), - ], - ), - ], "media_player": [ ClassTypeHintMatch( base_class="Entity", diff --git a/pyproject.toml b/pyproject.toml index 437aea9f097..9a935b3a5fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "homeassistant" -version = "2024.8.3" +version = "2024.9.0" license = {text = "Apache-2.0"} description = "Open-source home automation platform running on Python 3." readme = "README.rst" @@ -29,14 +29,14 @@ dependencies = [ "aiohttp-fast-zlib==0.1.1", "aiozoneinfo==0.2.1", "astral==2.2", - "async-interrupt==1.1.2", + "async-interrupt==1.2.0", "attrs==23.2.0", "atomicwrites-homeassistant==1.4.1", "awesomeversion==24.6.0", "bcrypt==4.1.3", "certifi>=2021.5.30", "ciso8601==2.3.1", - "fnv-hash-fast==0.5.0", + "fnv-hash-fast==1.0.2", # hass-nabucasa is imported by helpers which don't depend on the cloud # integration "hass-nabucasa==0.81.1", @@ -47,21 +47,21 @@ dependencies = [ "ifaddr==0.2.0", "Jinja2==3.1.4", "lru-dict==1.3.0", - "PyJWT==2.8.0", + "PyJWT==2.9.0", # PyJWT has loose dependency. We want the latest one. 
"cryptography==43.0.0", "Pillow==10.4.0", "pyOpenSSL==24.2.1", - "orjson==3.10.6", + "orjson==3.10.7", "packaging>=23.1", "pip>=21.3.1", "psutil-home-assistant==0.0.1", "python-slugify==8.0.4", - "PyYAML==6.0.1", + "PyYAML==6.0.2", "requests==2.32.3", "SQLAlchemy==2.0.31", "typing-extensions>=4.12.2,<5.0", - "ulid-transform==0.13.1", + "ulid-transform==1.0.2", # Constrain urllib3 to ensure we deal with CVE-2020-26137 and CVE-2021-33503 # Temporary setting an upper bound, to prevent compat issues with urllib3>=2 # https://github.com/home-assistant/core/issues/97248 @@ -69,7 +69,7 @@ dependencies = [ "voluptuous==0.15.2", "voluptuous-serialize==2.6.0", "voluptuous-openapi==0.0.5", - "yarl==1.9.4", + "yarl==1.9.7", ] [project.urls] @@ -312,6 +312,7 @@ disable = [ "no-else-return", # RET505 "broad-except", # BLE001 "protected-access", # SLF001 + "broad-exception-raised", # TRY002 # "no-self-use", # PLR6301 # Optional plugin, not enabled # Handled by mypy @@ -767,6 +768,7 @@ select = [ "SLOT", # flake8-slots "T100", # Trace found: {name} used "T20", # flake8-print + "TCH", # flake8-type-checking "TID251", # Banned imports "TRY", # tryceratops "UP", # pyupgrade @@ -799,6 +801,12 @@ ignore = [ "SIM103", # Return the condition {condition} directly "SIM108", # Use ternary operator {contents} instead of if-else-block "SIM115", # Use context handler for opening files + + # Moving imports into type-checking blocks can mess with pytest.patch() + "TCH001", # Move application import {} into a type-checking block + "TCH002", # Move third-party import {} into a type-checking block + "TCH003", # Move standard library import {} into a type-checking block + "TRY003", # Avoid specifying long messages outside the exception class "TRY400", # Use `logging.exception` instead of `logging.error` # Ignored due to performance: https://github.com/charliermarsh/ruff/issues/2923 @@ -817,15 +825,7 @@ ignore = [ "ISC001", # Disabled because ruff does not understand type of __all__ generated by a function - "PLE0605", - - # temporarily disabled - "PT019", - "PYI024", # Use typing.NamedTuple instead of collections.namedtuple - "RET503", - "RET501", - "TRY002", - "TRY301" + "PLE0605" ] [tool.ruff.lint.flake8-import-conventions.extend-aliases] @@ -851,7 +851,6 @@ voluptuous = "vol" "homeassistant.components.lawn_mower.PLATFORM_SCHEMA" = "LAWN_MOWER_PLATFORM_SCHEMA" "homeassistant.components.light.PLATFORM_SCHEMA" = "LIGHT_PLATFORM_SCHEMA" "homeassistant.components.lock.PLATFORM_SCHEMA" = "LOCK_PLATFORM_SCHEMA" -"homeassistant.components.mailbox.PLATFORM_SCHEMA" = "MAILBOX_PLATFORM_SCHEMA" "homeassistant.components.media_player.PLATFORM_SCHEMA" = "MEDIA_PLAYER_PLATFORM_SCHEMA" "homeassistant.components.notify.PLATFORM_SCHEMA" = "NOTIFY_PLATFORM_SCHEMA" "homeassistant.components.number.PLATFORM_SCHEMA" = "NUMBER_PLATFORM_SCHEMA" diff --git a/requirements.txt b/requirements.txt index 9af81e775ee..fd6e8815e90 100644 --- a/requirements.txt +++ b/requirements.txt @@ -9,36 +9,36 @@ aiohttp_cors==0.7.0 aiohttp-fast-zlib==0.1.1 aiozoneinfo==0.2.1 astral==2.2 -async-interrupt==1.1.2 +async-interrupt==1.2.0 attrs==23.2.0 atomicwrites-homeassistant==1.4.1 awesomeversion==24.6.0 bcrypt==4.1.3 certifi>=2021.5.30 ciso8601==2.3.1 -fnv-hash-fast==0.5.0 +fnv-hash-fast==1.0.2 hass-nabucasa==0.81.1 httpx==0.27.0 home-assistant-bluetooth==1.12.2 ifaddr==0.2.0 Jinja2==3.1.4 lru-dict==1.3.0 -PyJWT==2.8.0 +PyJWT==2.9.0 cryptography==43.0.0 Pillow==10.4.0 pyOpenSSL==24.2.1 -orjson==3.10.6 +orjson==3.10.7 packaging>=23.1 pip>=21.3.1 
psutil-home-assistant==0.0.1 python-slugify==8.0.4 -PyYAML==6.0.1 +PyYAML==6.0.2 requests==2.32.3 SQLAlchemy==2.0.31 typing-extensions>=4.12.2,<5.0 -ulid-transform==0.13.1 +ulid-transform==1.0.2 urllib3>=1.26.5,<2 voluptuous==0.15.2 voluptuous-serialize==2.6.0 voluptuous-openapi==0.0.5 -yarl==1.9.4 +yarl==1.9.7 diff --git a/requirements_all.txt b/requirements_all.txt index 48fed02cd53..59e9f95e93e 100644 --- a/requirements_all.txt +++ b/requirements_all.txt @@ -36,7 +36,7 @@ Mastodon.py==1.8.1 Pillow==10.4.0 # homeassistant.components.plex -PlexAPI==4.15.14 +PlexAPI==4.15.16 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 @@ -84,7 +84,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.1 +PySwitchbot==0.48.2 # homeassistant.components.switchmate PySwitchmate==0.5.1 @@ -97,10 +97,10 @@ PyTransportNSW==0.1.1 # homeassistant.components.camera # homeassistant.components.stream -PyTurboJPEG==1.7.1 +PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.32.0 +PyViCare-neo==0.2.1 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -179,11 +179,11 @@ aioairq==0.3.2 aioairzone-cloud==0.6.2 # homeassistant.components.airzone -aioairzone==0.8.1 +aioairzone==0.8.2 # homeassistant.components.ambient_network # homeassistant.components.ambient_station -aioambient==2024.01.0 +aioambient==2024.08.0 # homeassistant.components.apcupsd aioapcaccess==0.4.2 @@ -198,10 +198,10 @@ aioaseko==0.2.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.7.3 +aioautomower==2024.8.0 # homeassistant.components.azure_devops -aioazuredevops==2.1.1 +aioazuredevops==2.2.1 # homeassistant.components.baf aiobafi6==0.9.0 @@ -237,7 +237,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.0.0 +aioesphomeapi==25.3.1 # homeassistant.components.flo aioflo==2021.11.0 @@ -276,7 +276,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.0 # homeassistant.components.lifx -aiolifx==1.0.6 +aiolifx==1.0.9 # homeassistant.components.livisi aiolivisi==0.0.19 @@ -285,10 +285,10 @@ aiolivisi==0.0.19 aiolookin==1.0.0 # homeassistant.components.lyric -aiolyric==1.1.0 +aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.8.1 +aiomealie==0.9.2 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -318,7 +318,7 @@ aiooui==0.1.6 aiopegelonline==0.0.10 # homeassistant.components.acmeda -aiopulse==0.4.4 +aiopulse==0.4.6 # homeassistant.components.purpleair aiopurpleair==2022.12.1 @@ -347,10 +347,10 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.34 +aioruckus==0.41 # homeassistant.components.russound_rio -aiorussound==2.2.3 +aiorussound==2.3.2 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -359,7 +359,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.2.4 +aioshelly==11.4.2 # homeassistant.components.skybell aioskybell==22.7.0 @@ -374,7 +374,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==3.4.3 +aioswitcher==4.0.2 # homeassistant.components.syncthing aiosyncthing==0.5.1 @@ -404,7 +404,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.0.2 +aiowithings==3.0.3 # homeassistant.components.yandex_transport aioymaps==1.2.5 @@ -416,7 +416,7 @@ airgradient==0.8.0 airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.0 +airthings-ble==0.9.1 # 
homeassistant.components.airthings airthings-cloud==0.2.0 @@ -440,7 +440,7 @@ amcrest==1.9.8 androidtv[async]==0.0.73 # homeassistant.components.androidtv_remote -androidtvremote2==0.1.1 +androidtvremote2==0.1.2 # homeassistant.components.anel_pwrctrl anel-pwrctrl-homeassistant==0.0.1.dev2 @@ -451,6 +451,9 @@ anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 +# homeassistant.components.anthropic +anthropic==0.31.2 + # homeassistant.components.weatherkit apple_weatherkit==1.1.2 @@ -461,13 +464,13 @@ apprise==1.8.0 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==1.3.3 +apsystems-ez1==2.2.1 # homeassistant.components.aqualogic aqualogic==2.6 # homeassistant.components.aranet -aranet4==2.3.4 +aranet4==2.4.0 # homeassistant.components.arcam_fmj arcam-fmj==1.5.2 @@ -478,9 +481,6 @@ arris-tg2492lg==2.2.0 # homeassistant.components.ampio asmog==0.0.6 -# homeassistant.components.asterisk_mbox -asterisk_mbox==0.5.0 - # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv @@ -522,6 +522,9 @@ autarco==2.0.0 # homeassistant.components.axis axis==62 +# homeassistant.components.fujitsu_fglair +ayla-iot-unofficial==1.3.1 + # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -556,7 +559,7 @@ beautifulsoup4==4.12.3 # beewi-smartclim==0.0.10 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.1 +bimmer-connected[china]==0.16.3 # homeassistant.components.bizkaibus bizkaibus==0.1.1 @@ -600,7 +603,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.19.4 +bluetooth-data-tools==1.20.0 # homeassistant.components.bond bond-async==0.2.1 @@ -622,7 +625,7 @@ bring-api==0.8.1 broadlink==0.19.0 # homeassistant.components.brother -brother==4.2.0 +brother==4.3.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -646,7 +649,7 @@ btsmarthub-devicelist==0.2.3 buienradar==1.0.6 # homeassistant.components.dhcp -cached_ipaddress==0.3.0 +cached-ipaddress==0.5.0 # homeassistant.components.caldav caldav==1.3.9 @@ -700,7 +703,7 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.22.1 +dbus-fast==2.24.0 # homeassistant.components.debugpy debugpy==1.8.1 @@ -712,7 +715,7 @@ debugpy==1.8.1 # decora==0.6 # homeassistant.components.ecovacs -deebot-client==8.3.0 +deebot-client==8.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -738,7 +741,7 @@ devolo-home-control-api==0.18.3 devolo-plc-api==1.4.1 # homeassistant.components.chacon_dio -dio-chacon-wifi-api==1.2.0 +dio-chacon-wifi-api==1.2.1 # homeassistant.components.directv directv==0.4.0 @@ -892,14 +895,14 @@ fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.5.1 +flipr-api==1.6.0 # homeassistant.components.flux_led flux-led==1.0.4 # homeassistant.components.homekit # homeassistant.components.recorder -fnv-hash-fast==0.5.0 +fnv-hash-fast==1.0.2 # homeassistant.components.foobot foobot_async==1.0.0 @@ -921,13 +924,13 @@ freesms==0.2.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.4.1 +fyta_cli==0.6.6 # homeassistant.components.google_translate gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.2 +gardena-bluetooth==1.4.3 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 @@ -989,7 +992,7 @@ google-cloud-texttospeech==2.16.3 
google-generativeai==0.6.0 # homeassistant.components.nest -google-nest-sdm==4.0.7 +google-nest-sdm==5.0.0 # homeassistant.components.google_travel_time googlemaps==2.5.1 @@ -1056,7 +1059,7 @@ ha-philipsjs==3.2.2 habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.1.3 +habluetooth==3.4.0 # homeassistant.components.cloud hass-nabucasa==0.81.1 @@ -1096,19 +1099,19 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.55 +holidays==0.56 # homeassistant.components.frontend -home-assistant-frontend==20240809.0 +home-assistant-frontend==20240904.0 # homeassistant.components.conversation -home-assistant-intents==2024.8.7 +home-assistant-intents==2024.9.4 # homeassistant.components.home_connect homeconnect==0.8.0 # homeassistant.components.homematicip_cloud -homematicip==1.1.1 +homematicip==1.1.2 # homeassistant.components.horizon horimote==0.4.1 @@ -1176,7 +1179,7 @@ inkbird-ble==0.5.8 insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire -intellifire4py==2.2.2 +intellifire4py==4.1.9 # homeassistant.components.iotty iottycloud==0.1.3 @@ -1222,7 +1225,7 @@ kiwiki-client==0.1.1 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.8.9.225351 +knx-frontend==2024.9.4.64538 # homeassistant.components.konnected konnected==1.2.0 @@ -1239,6 +1242,9 @@ lakeside==0.13 # homeassistant.components.laundrify laundrify-aio==1.2.2 +# homeassistant.components.lcn +lcn-frontend==0.1.6 + # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1276,14 +1282,11 @@ linear-garage-door==0.2.9 linode-api==4.1.9b1 # homeassistant.components.lamarzocco -lmcloud==1.1.13 +lmcloud==1.2.2 # homeassistant.components.google_maps locationsharinglib==5.0.1 -# homeassistant.components.logi_circle -logi-circle==0.2.3 - # homeassistant.components.london_underground london-tube-status==0.5 @@ -1303,7 +1306,7 @@ lw12==0.9.2 lxml==5.1.0 # homeassistant.components.matrix -matrix-nio==0.24.0 +matrix-nio==0.25.0 # homeassistant.components.maxcube maxcube-api==0.4.3 @@ -1327,7 +1330,7 @@ melnor-bluetooth==0.0.25 messagebird==1.2.0 # homeassistant.components.meteoalarm -meteoalertapi==0.3.0 +meteoalertapi==0.3.1 # homeassistant.components.meteo_france meteofrance-api==1.3.0 @@ -1363,10 +1366,10 @@ monzopy==1.3.2 mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.23 +motionblinds==0.6.24 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.0 +motionblindsble==0.1.1 # homeassistant.components.motioneye motioneye-client==0.3.14 @@ -1420,11 +1423,14 @@ nextcloudmonitor==1.5.1 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.1.0 +nextdns==3.2.0 # homeassistant.components.nibe_heatpump nibe==2.11.0 +# homeassistant.components.nice_go +nice-go==0.3.8 + # homeassistant.components.niko_home_control niko-home-control==0.2.1 @@ -1517,7 +1523,7 @@ openwrt-luci-rpc==1.1.17 openwrt-ubus-rpc==0.0.2 # homeassistant.components.opower -opower==0.6.0 +opower==0.7.0 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1650,7 +1656,7 @@ py-dormakaba-dkey==1.0.5 py-improv-ble-client==1.0.3 # homeassistant.components.madvr -py-madvr2==1.6.29 +py-madvr2==1.6.32 # homeassistant.components.melissa py-melissa-climate==2.1.4 @@ -1668,7 +1674,7 @@ py-schluter==0.1.7 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.4.5 +py-synologydsm-api==2.5.2 # homeassistant.components.zabbix py-zabbix==1.1.7 @@ -1680,7 +1686,7 @@ pyAtome==0.1.1 pyCEC==0.5.2 # homeassistant.components.control4 
-pyControl4==1.1.0 +pyControl4==1.2.0 # homeassistant.components.duotecno pyDuotecno==2024.5.1 @@ -1689,7 +1695,7 @@ pyDuotecno==2024.5.1 pyElectra==1.2.4 # homeassistant.components.emby -pyEmby==1.9 +pyEmby==1.10 # homeassistant.components.hikvision pyHik==0.3.2 @@ -1701,7 +1707,7 @@ pyRFXtrx==0.31.1 pySDCP==1 # homeassistant.components.tibber -pyTibber==0.28.2 +pyTibber==0.30.1 # homeassistant.components.dlink pyW215==0.7.0 @@ -1735,10 +1741,10 @@ pyasuswrt==0.1.21 pyatag==0.3.5.3 # homeassistant.components.netatmo -pyatmo==8.0.3 +pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.14.3 +pyatv==0.15.0 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 @@ -1783,7 +1789,7 @@ pycmus==0.1.1 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.1.5 +pycoolmasternet-async==0.2.2 # homeassistant.components.microsoft pycsspeechtts==1.0.8 @@ -1792,7 +1798,7 @@ pycsspeechtts==1.0.8 # pycups==1.9.73 # homeassistant.components.daikin -pydaikin==2.13.4 +pydaikin==2.13.6 # homeassistant.components.danfoss_air pydanfossair==0.1.0 @@ -1813,7 +1819,7 @@ pydiscovergy==3.0.1 pydoods==1.0.2 # homeassistant.components.hydrawise -pydrawise==2024.6.4 +pydrawise==2024.8.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1873,7 +1879,7 @@ pyfido==2.1.2 pyfireservicerota==0.0.43 # homeassistant.components.flic -pyflic==2.0.3 +pyflic==2.0.4 # homeassistant.components.futurenow pyfnip==0.2 @@ -2103,7 +2109,7 @@ pyownet==0.10.0.post1 pypca==0.0.7 # homeassistant.components.lcn -pypck==0.7.20 +pypck==0.7.21 # homeassistant.components.pjlink pypjlink2==1.2.1 @@ -2166,7 +2172,7 @@ pysaj==0.0.16 pyschlage==2024.8.0 # homeassistant.components.sensibo -pysensibo==1.0.36 +pysensibo==1.1.0 # homeassistant.components.serial pyserial-asyncio-fast==0.13 @@ -2181,7 +2187,7 @@ pyserial==3.5 pysesame2==1.0.1 # homeassistant.components.seventeentrack -pyseventeentrack==1.0.0 +pyseventeentrack==1.0.1 # homeassistant.components.sia pysiaalarm==3.1.1 @@ -2207,8 +2213,11 @@ pysmartthings==0.7.8 # homeassistant.components.edl21 pysml==0.0.12 +# homeassistant.components.smlight +pysmlight==0.0.13 + # homeassistant.components.snmp -pysnmp-lextudio==6.0.11 +pysnmp==6.2.5 # homeassistant.components.snooz pysnooz==0.8.6 @@ -2219,6 +2228,9 @@ pysoma==0.0.12 # homeassistant.components.spc pyspcwebgw==0.7.0 +# homeassistant.components.assist_pipeline +pyspeex-noise==1.0.2 + # homeassistant.components.squeezebox pysqueezebox==0.7.1 @@ -2253,7 +2265,7 @@ python-awair==0.2.4 python-blockchain-api==0.0.2 # homeassistant.components.bsblan -python-bsblan==0.5.18 +python-bsblan==0.6.2 # homeassistant.components.clementine python-clementine-remote==1.0.1 @@ -2286,7 +2298,7 @@ python-gitlab==1.6.0 python-homeassistant-analytics==0.7.0 # homeassistant.components.homewizard -python-homewizard-energy==v6.2.0 +python-homewizard-energy==v6.3.0 # homeassistant.components.hp_ilo python-hpilo==4.4.3 @@ -2301,10 +2313,10 @@ python-join-api==0.0.9 python-juicenet==1.1.0 # homeassistant.components.tplink -python-kasa[speedups]==0.7.1 +python-kasa[speedups]==0.7.2 # homeassistant.components.linkplay -python-linkplay==0.0.6 +python-linkplay==0.0.9 # homeassistant.components.lirc # python-lirc==1.2.3 @@ -2356,7 +2368,7 @@ python-tado==0.17.6 python-technove==1.3.1 # homeassistant.components.telegram_bot -python-telegram-bot[socks]==21.0.1 +python-telegram-bot[socks]==21.5 # homeassistant.components.vlc python-vlc==3.0.18122 @@ -2373,6 +2385,9 @@ pytomorrowio==0.3.6 # 
homeassistant.components.touchline pytouchline==0.7 +# homeassistant.components.touchline_sl +pytouchlinesl==0.1.5 + # homeassistant.components.traccar # homeassistant.components.traccar_server pytraccar==2.1.1 @@ -2483,13 +2498,13 @@ refoss-ha==1.2.4 regenmaschine==2024.03.0 # homeassistant.components.renault -renault-api==0.2.5 +renault-api==0.2.7 # homeassistant.components.renson renson-endura-delta==1.7.1 # homeassistant.components.reolink -reolink-aio==0.9.7 +reolink-aio==0.9.8 # homeassistant.components.idteck_prox rfk101py==0.0.1 @@ -2498,7 +2513,7 @@ rfk101py==0.0.1 rflink==0.0.66 # homeassistant.components.ring -ring-doorbell[listen]==0.8.12 +ring-doorbell[listen]==0.9.0 # homeassistant.components.fleetgo ritassist==0.9.2 @@ -2604,7 +2619,7 @@ simplepush==2.2.3 simplisafe-python==2024.01.0 # homeassistant.components.sisyphus -sisyphus-control==3.1.3 +sisyphus-control==3.1.4 # homeassistant.components.slack slackclient==2.5.0 @@ -2628,7 +2643,7 @@ soco==0.30.4 solaredge-local==0.2.3 # homeassistant.components.solarlog -solarlog_cli==0.1.5 +solarlog_cli==0.1.6 # homeassistant.components.solax solax==3.1.1 @@ -2819,7 +2834,7 @@ twitchAPI==4.2.1 uasiren==0.0.1 # homeassistant.components.unifiprotect -uiprotect==5.4.0 +uiprotect==6.0.2 # homeassistant.components.landisgyr_heat_meter ultraheat-api==0.5.7 @@ -2848,7 +2863,7 @@ upcloud-api==2.5.1 url-normalize==1.4.3 # homeassistant.components.uvc -uvcclient==0.11.0 +uvcclient==0.12.1 # homeassistant.components.roborock vacuum-map-parser-roborock==0.1.2 @@ -2903,7 +2918,7 @@ watchdog==2.3.1 waterfurnace==1.1.0 # homeassistant.components.weatherflow_cloud -weatherflow4py==0.2.21 +weatherflow4py==0.2.23 # homeassistant.components.webmin webmin-xmlrpc==0.0.2 @@ -2952,14 +2967,16 @@ xmltodict==0.13.0 xs1-api-client==3.0.0 # homeassistant.components.yale_smart_alarm -yalesmartalarmclient==0.3.9 +yalesmartalarmclient==0.4.0 # homeassistant.components.august +# homeassistant.components.yale # homeassistant.components.yalexs_ble yalexs-ble==2.4.3 # homeassistant.components.august -yalexs==8.4.1 +# homeassistant.components.yale +yalexs==8.6.3 # homeassistant.components.yeelight yeelight==0.7.14 @@ -2986,13 +3003,13 @@ zamg==0.3.6 zengge==0.2 # homeassistant.components.zeroconf -zeroconf==0.132.2 +zeroconf==0.133.0 # homeassistant.components.zeversolar zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.31 +zha==0.0.32 # homeassistant.components.zhong_hong zhong-hong-hvac==1.0.12 diff --git a/requirements_test_all.txt b/requirements_test_all.txt index 5df5a0836c7..ace1c743fe0 100644 --- a/requirements_test_all.txt +++ b/requirements_test_all.txt @@ -36,7 +36,7 @@ Mastodon.py==1.8.1 Pillow==10.4.0 # homeassistant.components.plex -PlexAPI==4.15.14 +PlexAPI==4.15.16 # homeassistant.components.progettihwsw ProgettiHWSW==0.1.3 @@ -81,7 +81,7 @@ PyQRCode==1.2.1 PyRMVtransport==0.3.3 # homeassistant.components.switchbot -PySwitchbot==0.48.1 +PySwitchbot==0.48.2 # homeassistant.components.syncthru PySyncThru==0.7.10 @@ -91,10 +91,10 @@ PyTransportNSW==0.1.1 # homeassistant.components.camera # homeassistant.components.stream -PyTurboJPEG==1.7.1 +PyTurboJPEG==1.7.5 # homeassistant.components.vicare -PyViCare==2.32.0 +PyViCare-neo==0.2.1 # homeassistant.components.xiaomi_aqara PyXiaomiGateway==0.14.3 @@ -167,11 +167,11 @@ aioairq==0.3.2 aioairzone-cloud==0.6.2 # homeassistant.components.airzone -aioairzone==0.8.1 +aioairzone==0.8.2 # homeassistant.components.ambient_network # homeassistant.components.ambient_station -aioambient==2024.01.0 
+aioambient==2024.08.0 # homeassistant.components.apcupsd aioapcaccess==0.4.2 @@ -186,10 +186,10 @@ aioaseko==0.2.0 aioasuswrt==1.4.0 # homeassistant.components.husqvarna_automower -aioautomower==2024.7.3 +aioautomower==2024.8.0 # homeassistant.components.azure_devops -aioazuredevops==2.1.1 +aioazuredevops==2.2.1 # homeassistant.components.baf aiobafi6==0.9.0 @@ -225,7 +225,7 @@ aioelectricitymaps==0.4.0 aioemonitor==1.0.5 # homeassistant.components.esphome -aioesphomeapi==25.0.0 +aioesphomeapi==25.3.1 # homeassistant.components.flo aioflo==2021.11.0 @@ -258,7 +258,7 @@ aiolifx-effects==0.3.2 aiolifx-themes==0.5.0 # homeassistant.components.lifx -aiolifx==1.0.6 +aiolifx==1.0.9 # homeassistant.components.livisi aiolivisi==0.0.19 @@ -267,10 +267,10 @@ aiolivisi==0.0.19 aiolookin==1.0.0 # homeassistant.components.lyric -aiolyric==1.1.0 +aiolyric==2.0.1 # homeassistant.components.mealie -aiomealie==0.8.1 +aiomealie==0.9.2 # homeassistant.components.modern_forms aiomodernforms==0.1.8 @@ -300,7 +300,7 @@ aiooui==0.1.6 aiopegelonline==0.0.10 # homeassistant.components.acmeda -aiopulse==0.4.4 +aiopulse==0.4.6 # homeassistant.components.purpleair aiopurpleair==2022.12.1 @@ -329,10 +329,10 @@ aiorecollect==2023.09.0 aioridwell==2024.01.0 # homeassistant.components.ruckus_unleashed -aioruckus==0.34 +aioruckus==0.41 # homeassistant.components.russound_rio -aiorussound==2.2.3 +aiorussound==2.3.2 # homeassistant.components.ruuvi_gateway aioruuvigateway==0.1.0 @@ -341,7 +341,7 @@ aioruuvigateway==0.1.0 aiosenz==1.0.0 # homeassistant.components.shelly -aioshelly==11.2.4 +aioshelly==11.4.2 # homeassistant.components.skybell aioskybell==22.7.0 @@ -356,7 +356,7 @@ aiosolaredge==0.2.0 aiosteamist==1.0.0 # homeassistant.components.switcher_kis -aioswitcher==3.4.3 +aioswitcher==4.0.2 # homeassistant.components.syncthing aiosyncthing==0.5.1 @@ -386,7 +386,7 @@ aiowatttime==0.1.1 aiowebostv==0.4.2 # homeassistant.components.withings -aiowithings==3.0.2 +aiowithings==3.0.3 # homeassistant.components.yandex_transport aioymaps==1.2.5 @@ -398,7 +398,7 @@ airgradient==0.8.0 airly==1.1.0 # homeassistant.components.airthings_ble -airthings-ble==0.9.0 +airthings-ble==0.9.1 # homeassistant.components.airthings airthings-cloud==0.2.0 @@ -416,7 +416,7 @@ amberelectric==1.1.1 androidtv[async]==0.0.73 # homeassistant.components.androidtv_remote -androidtvremote2==0.1.1 +androidtvremote2==0.1.2 # homeassistant.components.anova anova-wifi==0.17.0 @@ -424,6 +424,9 @@ anova-wifi==0.17.0 # homeassistant.components.anthemav anthemav==1.4.1 +# homeassistant.components.anthropic +anthropic==0.31.2 + # homeassistant.components.weatherkit apple_weatherkit==1.1.2 @@ -434,17 +437,14 @@ apprise==1.8.0 aprslib==0.7.2 # homeassistant.components.apsystems -apsystems-ez1==1.3.3 +apsystems-ez1==2.2.1 # homeassistant.components.aranet -aranet4==2.3.4 +aranet4==2.4.0 # homeassistant.components.arcam_fmj arcam-fmj==1.5.2 -# homeassistant.components.asterisk_mbox -asterisk_mbox==0.5.0 - # homeassistant.components.dlna_dmr # homeassistant.components.dlna_dms # homeassistant.components.samsungtv @@ -471,6 +471,9 @@ autarco==2.0.0 # homeassistant.components.axis axis==62 +# homeassistant.components.fujitsu_fglair +ayla-iot-unofficial==1.3.1 + # homeassistant.components.azure_event_hub azure-eventhub==5.11.1 @@ -490,7 +493,7 @@ base36==0.1.1 beautifulsoup4==4.12.3 # homeassistant.components.bmw_connected_drive -bimmer-connected[china]==0.16.1 +bimmer-connected[china]==0.16.3 # homeassistant.components.eq3btsmart # homeassistant.components.esphome @@ 
-524,7 +527,7 @@ bluetooth-auto-recovery==1.4.2 # homeassistant.components.ld2410_ble # homeassistant.components.led_ble # homeassistant.components.private_ble_device -bluetooth-data-tools==1.19.4 +bluetooth-data-tools==1.20.0 # homeassistant.components.bond bond-async==0.2.1 @@ -542,7 +545,7 @@ bring-api==0.8.1 broadlink==0.19.0 # homeassistant.components.brother -brother==4.2.0 +brother==4.3.0 # homeassistant.components.brottsplatskartan brottsplatskartan==1.0.5 @@ -557,7 +560,7 @@ bthome-ble==3.9.1 buienradar==1.0.6 # homeassistant.components.dhcp -cached_ipaddress==0.3.0 +cached-ipaddress==0.5.0 # homeassistant.components.caldav caldav==1.3.9 @@ -596,13 +599,13 @@ datadog==0.15.0 datapoint==0.9.9 # homeassistant.components.bluetooth -dbus-fast==2.22.1 +dbus-fast==2.24.0 # homeassistant.components.debugpy debugpy==1.8.1 # homeassistant.components.ecovacs -deebot-client==8.3.0 +deebot-client==8.4.0 # homeassistant.components.ihc # homeassistant.components.namecheapdns @@ -628,7 +631,7 @@ devolo-home-control-api==0.18.3 devolo-plc-api==1.4.1 # homeassistant.components.chacon_dio -dio-chacon-wifi-api==1.2.0 +dio-chacon-wifi-api==1.2.1 # homeassistant.components.directv directv==0.4.0 @@ -717,6 +720,9 @@ eternalegypt==0.0.16 # homeassistant.components.eufylife_ble eufylife-ble-client==0.1.8 +# homeassistant.components.evohome +evohome-async==0.4.20 + # homeassistant.components.bryant_evolution evolutionhttp==0.0.18 @@ -748,14 +754,14 @@ fjaraskupan==2.3.0 flexit_bacnet==2.2.1 # homeassistant.components.flipr -flipr-api==1.5.1 +flipr-api==1.6.0 # homeassistant.components.flux_led flux-led==1.0.4 # homeassistant.components.homekit # homeassistant.components.recorder -fnv-hash-fast==0.5.0 +fnv-hash-fast==1.0.2 # homeassistant.components.foobot foobot_async==1.0.0 @@ -771,13 +777,13 @@ freebox-api==1.1.0 fritzconnection[qr]==1.13.2 # homeassistant.components.fyta -fyta_cli==0.4.1 +fyta_cli==0.6.6 # homeassistant.components.google_translate gTTS==2.2.4 # homeassistant.components.gardena_bluetooth -gardena-bluetooth==1.4.2 +gardena-bluetooth==1.4.3 # homeassistant.components.google_assistant_sdk gassist-text==0.0.11 @@ -833,7 +839,7 @@ google-cloud-pubsub==2.13.11 google-generativeai==0.6.0 # homeassistant.components.nest -google-nest-sdm==4.0.7 +google-nest-sdm==5.0.0 # homeassistant.components.google_travel_time googlemaps==2.5.1 @@ -888,7 +894,7 @@ ha-philipsjs==3.2.2 habitipy==0.3.1 # homeassistant.components.bluetooth -habluetooth==3.1.3 +habluetooth==3.4.0 # homeassistant.components.cloud hass-nabucasa==0.81.1 @@ -916,19 +922,19 @@ hole==0.8.0 # homeassistant.components.holiday # homeassistant.components.workday -holidays==0.55 +holidays==0.56 # homeassistant.components.frontend -home-assistant-frontend==20240809.0 +home-assistant-frontend==20240904.0 # homeassistant.components.conversation -home-assistant-intents==2024.8.7 +home-assistant-intents==2024.9.4 # homeassistant.components.home_connect homeconnect==0.8.0 # homeassistant.components.homematicip_cloud -homematicip==1.1.1 +homematicip==1.1.2 # homeassistant.components.remember_the_milk httplib2==0.20.4 @@ -981,7 +987,7 @@ inkbird-ble==0.5.8 insteon-frontend-home-assistant==0.5.0 # homeassistant.components.intellifire -intellifire4py==2.2.2 +intellifire4py==4.1.9 # homeassistant.components.iotty iottycloud==0.1.3 @@ -1015,7 +1021,7 @@ kegtron-ble==0.4.0 knocki==0.3.1 # homeassistant.components.knx -knx-frontend==2024.8.9.225351 +knx-frontend==2024.9.4.64538 # homeassistant.components.konnected konnected==1.2.0 @@ -1029,6 +1035,9 @@ 
lacrosse-view==1.0.2 # homeassistant.components.laundrify laundrify-aio==1.2.2 +# homeassistant.components.lcn +lcn-frontend==0.1.6 + # homeassistant.components.ld2410_ble ld2410-ble==0.1.1 @@ -1051,10 +1060,7 @@ libsoundtouch==0.8 linear-garage-door==0.2.9 # homeassistant.components.lamarzocco -lmcloud==1.1.13 - -# homeassistant.components.logi_circle -logi-circle==0.2.3 +lmcloud==1.2.2 # homeassistant.components.london_underground london-tube-status==0.5 @@ -1072,7 +1078,7 @@ lupupy==0.3.2 lxml==5.1.0 # homeassistant.components.matrix -matrix-nio==0.24.0 +matrix-nio==0.25.0 # homeassistant.components.maxcube maxcube-api==0.4.3 @@ -1126,10 +1132,10 @@ monzopy==1.3.2 mopeka-iot-ble==0.8.0 # homeassistant.components.motion_blinds -motionblinds==0.6.23 +motionblinds==0.6.24 # homeassistant.components.motionblinds_ble -motionblindsble==0.1.0 +motionblindsble==0.1.1 # homeassistant.components.motioneye motioneye-client==0.3.14 @@ -1174,11 +1180,14 @@ nextcloudmonitor==1.5.1 nextcord==2.6.0 # homeassistant.components.nextdns -nextdns==3.1.0 +nextdns==3.2.0 # homeassistant.components.nibe_heatpump nibe==2.11.0 +# homeassistant.components.nice_go +nice-go==0.3.8 + # homeassistant.components.nfandroidtv notifications-android-tv==0.1.5 @@ -1241,7 +1250,7 @@ openhomedevice==2.2.0 openwebifpy==4.2.7 # homeassistant.components.opower -opower==0.6.0 +opower==0.7.0 # homeassistant.components.oralb oralb-ble==0.17.6 @@ -1339,7 +1348,7 @@ py-dormakaba-dkey==1.0.5 py-improv-ble-client==1.0.3 # homeassistant.components.madvr -py-madvr2==1.6.29 +py-madvr2==1.6.32 # homeassistant.components.melissa py-melissa-climate==2.1.4 @@ -1354,13 +1363,13 @@ py-nightscout==1.2.2 py-sucks==0.9.10 # homeassistant.components.synology_dsm -py-synologydsm-api==2.4.5 +py-synologydsm-api==2.5.2 # homeassistant.components.hdmi_cec pyCEC==0.5.2 # homeassistant.components.control4 -pyControl4==1.1.0 +pyControl4==1.2.0 # homeassistant.components.duotecno pyDuotecno==2024.5.1 @@ -1372,7 +1381,7 @@ pyElectra==1.2.4 pyRFXtrx==0.31.1 # homeassistant.components.tibber -pyTibber==0.28.2 +pyTibber==0.30.1 # homeassistant.components.dlink pyW215==0.7.0 @@ -1400,10 +1409,10 @@ pyasuswrt==0.1.21 pyatag==0.3.5.3 # homeassistant.components.netatmo -pyatmo==8.0.3 +pyatmo==8.1.0 # homeassistant.components.apple_tv -pyatv==0.14.3 +pyatv==0.15.0 # homeassistant.components.aussie_broadband pyaussiebb==0.0.15 @@ -1430,13 +1439,13 @@ pycfdns==3.0.0 pycomfoconnect==0.5.1 # homeassistant.components.coolmaster -pycoolmasternet-async==0.1.5 +pycoolmasternet-async==0.2.2 # homeassistant.components.microsoft pycsspeechtts==1.0.8 # homeassistant.components.daikin -pydaikin==2.13.4 +pydaikin==2.13.6 # homeassistant.components.deconz pydeconz==116 @@ -1448,7 +1457,7 @@ pydexcom==0.2.3 pydiscovergy==3.0.1 # homeassistant.components.hydrawise -pydrawise==2024.6.4 +pydrawise==2024.8.0 # homeassistant.components.android_ip_webcam pydroid-ipcam==2.0.0 @@ -1493,7 +1502,7 @@ pyfido==2.1.2 pyfireservicerota==0.0.43 # homeassistant.components.flic -pyflic==2.0.3 +pyflic==2.0.4 # homeassistant.components.forked_daapd pyforked-daapd==0.1.14 @@ -1678,7 +1687,7 @@ pyoverkiz==1.13.14 pyownet==0.10.0.post1 # homeassistant.components.lcn -pypck==0.7.20 +pypck==0.7.21 # homeassistant.components.pjlink pypjlink2==1.2.1 @@ -1726,7 +1735,7 @@ pysabnzbd==1.1.1 pyschlage==2024.8.0 # homeassistant.components.sensibo -pysensibo==1.0.36 +pysensibo==1.1.0 # homeassistant.components.acer_projector # homeassistant.components.crownstone @@ -1735,7 +1744,7 @@ pysensibo==1.0.36 
pyserial==3.5

# homeassistant.components.seventeentrack
-pyseventeentrack==1.0.0
+pyseventeentrack==1.0.1

# homeassistant.components.sia
pysiaalarm==3.1.1
@@ -1758,8 +1767,11 @@ pysmartthings==0.7.8
# homeassistant.components.edl21
pysml==0.0.12

+# homeassistant.components.smlight
+pysmlight==0.0.13
+
# homeassistant.components.snmp
-pysnmp-lextudio==6.0.11
+pysnmp==6.2.5

# homeassistant.components.snooz
pysnooz==0.8.6
@@ -1770,6 +1782,9 @@ pysoma==0.0.12
# homeassistant.components.spc
pyspcwebgw==0.7.0

+# homeassistant.components.assist_pipeline
+pyspeex-noise==1.0.2
+
# homeassistant.components.squeezebox
pysqueezebox==0.7.1
@@ -1792,7 +1807,7 @@ python-MotionMount==2.0.0
python-awair==0.2.4

# homeassistant.components.bsblan
-python-bsblan==0.5.18
+python-bsblan==0.6.2

# homeassistant.components.ecobee
python-ecobee-api==0.2.18
@@ -1807,7 +1822,7 @@ python-fullykiosk==0.0.14
python-homeassistant-analytics==0.7.0

# homeassistant.components.homewizard
-python-homewizard-energy==v6.2.0
+python-homewizard-energy==v6.3.0

# homeassistant.components.izone
python-izone==1.2.9
@@ -1816,10 +1831,10 @@ python-izone==1.2.9
python-juicenet==1.1.0

# homeassistant.components.tplink
-python-kasa[speedups]==0.7.1
+python-kasa[speedups]==0.7.2

# homeassistant.components.linkplay
-python-linkplay==0.0.6
+python-linkplay==0.0.9

# homeassistant.components.matter
python-matter-server==6.3.0
@@ -1865,7 +1880,7 @@ python-tado==0.17.6
python-technove==1.3.1

# homeassistant.components.telegram_bot
-python-telegram-bot[socks]==21.0.1
+python-telegram-bot[socks]==21.5

# homeassistant.components.tile
pytile==2023.12.0
@@ -1873,6 +1888,9 @@ pytile==2023.12.0
# homeassistant.components.tomorrowio
pytomorrowio==0.3.6

+# homeassistant.components.touchline_sl
+pytouchlinesl==0.1.5
+
# homeassistant.components.traccar
# homeassistant.components.traccar_server
pytraccar==2.1.1
@@ -1962,19 +1980,19 @@ refoss-ha==1.2.4
regenmaschine==2024.03.0

# homeassistant.components.renault
-renault-api==0.2.5
+renault-api==0.2.7

# homeassistant.components.renson
renson-endura-delta==1.7.1

# homeassistant.components.reolink
-reolink-aio==0.9.7
+reolink-aio==0.9.8

# homeassistant.components.rflink
rflink==0.0.66

# homeassistant.components.ring
-ring-doorbell[listen]==0.8.12
+ring-doorbell[listen]==0.9.0

# homeassistant.components.roku
rokuecp==0.19.3
@@ -2068,7 +2086,7 @@ snapcast==2.3.6
soco==0.30.4

# homeassistant.components.solarlog
-solarlog_cli==0.1.5
+solarlog_cli==0.1.6

# homeassistant.components.solax
solax==3.1.1
@@ -2217,7 +2235,7 @@ twitchAPI==4.2.1
uasiren==0.0.1

# homeassistant.components.unifiprotect
-uiprotect==5.4.0
+uiprotect==6.0.2

# homeassistant.components.landisgyr_heat_meter
ultraheat-api==0.5.7
@@ -2240,7 +2258,7 @@ upcloud-api==2.5.1
url-normalize==1.4.3

# homeassistant.components.uvc
-uvcclient==0.11.0
+uvcclient==0.12.1

# homeassistant.components.roborock
vacuum-map-parser-roborock==0.1.2
@@ -2286,7 +2304,7 @@ wallbox==0.7.0
watchdog==2.3.1

# homeassistant.components.weatherflow_cloud
-weatherflow4py==0.2.21
+weatherflow4py==0.2.23

# homeassistant.components.webmin
webmin-xmlrpc==0.0.2
@@ -2329,14 +2347,16 @@ xknxproject==3.7.1
xmltodict==0.13.0

# homeassistant.components.yale_smart_alarm
-yalesmartalarmclient==0.3.9
+yalesmartalarmclient==0.4.0

# homeassistant.components.august
+# homeassistant.components.yale
# homeassistant.components.yalexs_ble
yalexs-ble==2.4.3

# homeassistant.components.august
-yalexs==8.4.1
+# homeassistant.components.yale
+yalexs==8.6.3

# homeassistant.components.yeelight
yeelight==0.7.14
@@ -2357,13 +2377,13 @@ yt-dlp==2024.08.06 zamg==0.3.6 # homeassistant.components.zeroconf -zeroconf==0.132.2 +zeroconf==0.133.0 # homeassistant.components.zeversolar zeversolar==0.3.1 # homeassistant.components.zha -zha==0.0.31 +zha==0.0.32 # homeassistant.components.zwave_js zwave-js-server-python==0.57.0 diff --git a/requirements_test_pre_commit.txt b/requirements_test_pre_commit.txt index d57a005bb5d..0c8d2b3796b 100644 --- a/requirements_test_pre_commit.txt +++ b/requirements_test_pre_commit.txt @@ -1,5 +1,5 @@ # Automatically generated from .pre-commit-config.yaml by gen_requirements_all.py, do not edit codespell==2.3.0 -ruff==0.5.5 +ruff==0.6.2 yamllint==1.35.1 diff --git a/script/amazon_polly.py b/script/amazon_polly.py new file mode 100644 index 00000000000..fcb0a4b7987 --- /dev/null +++ b/script/amazon_polly.py @@ -0,0 +1,70 @@ +"""Helper script to update supported languages for Amazone Polly text-to-speech (TTS). + +N.B. This script requires AWS credentials. +""" + +from dataclasses import dataclass +from pathlib import Path +from typing import Self + +import boto3 + +from .hassfest.serializer import format_python_namespace + + +@dataclass(frozen=True) +class AmazonPollyVoice: + """Amazon Polly Voice.""" + + id: str + name: str + gender: str + language_name: str + language_code: str + supported_engines: set[str] + additional_language_codes: set[str] + + @classmethod + def validate(cls, model: dict[str, str | list[str]]) -> Self: + """Validate data model.""" + return cls( + id=model["Id"], + name=model["Name"], + gender=model["Gender"], + language_name=model["LanguageName"], + language_code=model["LanguageCode"], + supported_engines=set(model["SupportedEngines"]), + additional_language_codes=set(model.get("AdditionalLanguageCodes", [])), + ) + + +def get_all_voices(client: boto3.client) -> list[AmazonPollyVoice]: + """Get list of all supported voices from Amazon Polly.""" + response = client.describe_voices() + return [AmazonPollyVoice.validate(voice) for voice in response["Voices"]] + + +supported_regions = set( + boto3.session.Session().get_available_regions(service_name="polly") +) + +polly_client = boto3.client(service_name="polly", region_name="us-east-1") +voices = get_all_voices(polly_client) +supported_voices = set({v.id for v in voices}) +supported_engines = set().union(*[v.supported_engines for v in voices]) + +Path("homeassistant/generated/amazon_polly.py").write_text( + format_python_namespace( + { + "SUPPORTED_VOICES": supported_voices, + "SUPPORTED_REGIONS": supported_regions, + "SUPPORTED_ENGINES": supported_engines, + }, + annotations={ + "SUPPORTED_VOICES": "Final[set[str]]", + "SUPPORTED_REGIONS": "Final[set[str]]", + "SUPPORTED_ENGINES": "Final[set[str]]", + }, + generator="script.amazon_polly", + ) +) diff --git a/script/gen_requirements_all.py b/script/gen_requirements_all.py index f887f8113a7..b2165289ad8 100755 --- a/script/gen_requirements_all.py +++ b/script/gen_requirements_all.py @@ -15,7 +15,7 @@ import tomllib from typing import Any from homeassistant.util.yaml.loader import load_yaml -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration # Requirements which can't be installed on all systems because they rely on additional # system packages. Requirements listed in EXCLUDED_REQUIREMENTS_ALL will be commented-out @@ -101,11 +101,6 @@ grpcio==1.59.0 grpcio-status==1.59.0 grpcio-reflection==1.59.0 -# libcst >=0.4.0 requires a newer Rust than we currently have available, -# thus our wheels builds fail. 
This pins it to the last working version, -# which at this point satisfies our needs. -libcst==0.3.23 - # This is a old unmaintained library and is replaced with pycryptodome pycrypto==1000000000.0.0 @@ -120,11 +115,6 @@ enum34==1000000000.0.0 typing==1000000000.0.0 uuid==1000000000.0.0 -# regex causes segfault with version 2021.8.27 -# https://bitbucket.org/mrabarnett/mrab-regex/issues/421/2021827-results-in-fatal-python-error -# This is fixed in 2021.8.28 -regex==2021.8.28 - # httpx requires httpcore, and httpcore requires anyio and h11, but the version constraints on # these requirements are quite loose. As the entire stack has some outstanding issues, and # even newer versions seem to introduce new issues, it's useful for us to pin all these @@ -140,12 +130,6 @@ hyperframe>=5.2.0 # Ensure we run compatible with musllinux build env numpy==1.26.0 -# Prevent dependency conflicts between sisyphus-control and aioambient -# until upper bounds for sisyphus-control have been updated -# https://github.com/jkeljo/sisyphus-control/issues/6 -python-engineio>=3.13.1,<4.0 -python-socketio>=4.6.0,<5.0 - # Constrain multidict to avoid typing issues # https://github.com/home-assistant/core/pull/67046 multidict>=6.0.2 @@ -174,7 +158,7 @@ pyOpenSSL>=24.0.0 # protobuf must be in package constraints for the wheel # builder to build binary wheels -protobuf==4.25.1 +protobuf==4.25.4 # faust-cchardet: Ensure we have a version we can build wheels # 2.1.18 is the first version that works with our wheel builder @@ -188,9 +172,6 @@ websockets>=11.0.1 # pysnmplib is no longer maintained and does not work with newer # python pysnmplib==1000000000.0.0 -# pysnmp is no longer maintained and does not work with newer -# python -pysnmp==1000000000.0.0 # The get-mac package has been replaced with getmac. Installing get-mac alongside getmac # breaks getmac due to them both sharing the same python package name inside 'getmac'. @@ -289,7 +270,9 @@ def gather_recursive_requirements( seen = set() seen.add(domain) - integration = Integration(Path(f"homeassistant/components/{domain}")) + integration = Integration( + Path(f"homeassistant/components/{domain}"), _get_hassfest_config() + ) integration.load_manifest() reqs = {x for x in integration.requirements if x not in CONSTRAINT_BASE} for dep_domain in integration.dependencies: @@ -355,7 +338,8 @@ def gather_requirements_from_manifests( errors: list[str], reqs: dict[str, list[str]] ) -> None: """Gather all of the requirements from manifests.""" - integrations = Integration.load_dir(Path("homeassistant/components")) + config = _get_hassfest_config() + integrations = Integration.load_dir(config.core_integrations_path, config) for domain in sorted(integrations): integration = integrations[domain] @@ -603,6 +587,17 @@ def main(validate: bool, ci: bool) -> int: return 0 +def _get_hassfest_config() -> Config: + """Get hassfest config.""" + return Config( + root=Path(".").absolute(), + specific_integrations=None, + action="validate", + requirements=True, + core_integrations_path=Path("homeassistant/components"), + ) + + if __name__ == "__main__": _VAL = sys.argv[-1] == "validate" _CI = sys.argv[-1] == "ci" diff --git a/script/hassfest/__main__.py b/script/hassfest/__main__.py index ea3c56200a2..b48871b4651 100644 --- a/script/hassfest/__main__.py +++ b/script/hassfest/__main__.py @@ -107,6 +107,12 @@ def get_config() -> Config: default=ALL_PLUGIN_NAMES, help="Comma-separate list of plugins to run. 
Valid plugin names: %(default)s", ) + parser.add_argument( + "--core-integrations-path", + type=pathlib.Path, + default=pathlib.Path("homeassistant/components"), + help="Path to core integrations", + ) parsed = parser.parse_args() if parsed.action is None: @@ -129,6 +135,7 @@ def get_config() -> Config: action=parsed.action, requirements=parsed.requirements, plugins=set(parsed.plugins), + core_integrations_path=parsed.core_integrations_path, ) @@ -146,12 +153,12 @@ def main() -> int: integrations = {} for int_path in config.specific_integrations: - integration = Integration(int_path) + integration = Integration(int_path, config) integration.load_manifest() integrations[integration.domain] = integration else: - integrations = Integration.load_dir(pathlib.Path("homeassistant/components")) + integrations = Integration.load_dir(config.core_integrations_path, config) plugins += HASS_PLUGINS for plugin in plugins: diff --git a/script/hassfest/brand.py b/script/hassfest/brand.py index fe47d31067a..6139e12393e 100644 --- a/script/hassfest/brand.py +++ b/script/hassfest/brand.py @@ -18,6 +18,8 @@ BRAND_SCHEMA = vol.Schema( } ) +BRAND_EXCEPTIONS = ["u_tec"] + def _validate_brand( brand: Brand, integrations: dict[str, Integration], config: Config @@ -38,10 +40,14 @@ def _validate_brand( f"Domain '{brand.domain}' does not match file name {brand.path.name}", ) - if not brand.integrations and not brand.iot_standards: + if ( + len(brand.integrations) < 2 + and not brand.iot_standards + and brand.domain not in BRAND_EXCEPTIONS + ): config.add_error( "brand", - f"{brand.path.name}: At least one of integrations or " + f"{brand.path.name}: At least two integrations or " "iot_standards must be non-empty", ) diff --git a/script/hassfest/docker.py b/script/hassfest/docker.py index e38a238be7d..6e39a5c350b 100644 --- a/script/hassfest/docker.py +++ b/script/hassfest/docker.py @@ -1,7 +1,12 @@ """Generate and validate the dockerfile.""" +from dataclasses import dataclass +from pathlib import Path + from homeassistant import core +from homeassistant.const import Platform from homeassistant.util import executor, thread +from script.gen_requirements_all import gather_recursive_requirements from .model import Config, Integration from .requirements import PACKAGE_REGEX, PIP_VERSION_RANGE_SEPARATOR @@ -20,7 +25,7 @@ ENV \ ARG QEMU_CPU # Install uv -RUN pip3 install uv=={uv_version} +RUN pip3 install uv=={uv} WORKDIR /usr/src @@ -61,30 +66,105 @@ COPY rootfs / WORKDIR /config """ +_HASSFEST_TEMPLATE = r"""# Automatically generated by hassfest. +# +# To update, run python3 -m script.hassfest -p docker +FROM python:alpine3.20 -def _get_uv_version() -> str: - with open("requirements_test.txt") as fp: +ENV \ + UV_SYSTEM_PYTHON=true \ + UV_EXTRA_INDEX_URL="https://wheels.home-assistant.io/musllinux-index/" + +SHELL ["/bin/sh", "-o", "pipefail", "-c"] +ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] +WORKDIR "/github/workspace" + +# Install uv +COPY --from=ghcr.io/astral-sh/uv:{uv} /uv /bin/uv + +COPY . 
/usr/src/homeassistant + +RUN \ + # Required for PyTurboJPEG + apk add --no-cache libturbojpeg \ + && cd /usr/src/homeassistant \ + && uv pip install \ + --no-build \ + --no-cache \ + -c homeassistant/package_constraints.txt \ + -r requirements.txt \ + stdlib-list==0.10.0 pipdeptree=={pipdeptree} tqdm=={tqdm} ruff=={ruff} \ + {required_components_packages} + +LABEL "name"="hassfest" +LABEL "maintainer"="Home Assistant " + +LABEL "com.github.actions.name"="hassfest" +LABEL "com.github.actions.description"="Run hassfest to validate standalone integration repositories" +LABEL "com.github.actions.icon"="terminal" +LABEL "com.github.actions.color"="gray-dark" +""" + + +def _get_package_versions(file: str, packages: set[str]) -> dict[str, str]: + package_versions: dict[str, str] = {} + with open(file, encoding="UTF-8") as fp: for _, line in enumerate(fp): + if package_versions.keys() == packages: + return package_versions + if match := PACKAGE_REGEX.match(line): pkg, sep, version = match.groups() - if pkg != "uv": + if pkg not in packages: continue if sep != "==" or not version: raise RuntimeError( - 'Requirement uv need to be pinned "uv==".' + f'Requirement {pkg} need to be pinned "{pkg}==".' ) for part in version.split(";", 1)[0].split(","): version_part = PIP_VERSION_RANGE_SEPARATOR.match(part) if version_part: - return version_part.group(2) + package_versions[pkg] = version_part.group(2) + break - raise RuntimeError("Invalid uv requirement in requirements_test.txt") + if package_versions.keys() == packages: + return package_versions + + raise RuntimeError("At least one package was not found in the requirements file.") -def _generate_dockerfile() -> str: +@dataclass +class File: + """File.""" + + content: str + path: Path + + +def _generate_hassfest_dockerimage( + config: Config, timeout: int, package_versions: dict[str, str] +) -> File: + packages = set() + already_checked_domains = set() + for platform in Platform: + packages.update( + gather_recursive_requirements(platform.value, already_checked_domains) + ) + + return File( + _HASSFEST_TEMPLATE.format( + timeout=timeout, + required_components_packages=" ".join(sorted(packages)), + **package_versions, + ), + config.root / "script/hassfest/docker/Dockerfile", + ) + + +def _generate_files(config: Config) -> list[File]: timeout = ( core.STOPPING_STAGE_SHUTDOWN_TIMEOUT + core.STOP_STAGE_SHUTDOWN_TIMEOUT @@ -93,27 +173,39 @@ def _generate_dockerfile() -> str: + executor.EXECUTOR_SHUTDOWN_TIMEOUT + thread.THREADING_SHUTDOWN_TIMEOUT + 10 + ) * 1000 + + package_versions = _get_package_versions( + "requirements_test.txt", {"pipdeptree", "tqdm", "uv"} ) - return DOCKERFILE_TEMPLATE.format( - timeout=timeout * 1000, uv_version=_get_uv_version() + package_versions |= _get_package_versions( + "requirements_test_pre_commit.txt", {"ruff"} ) + return [ + File( + DOCKERFILE_TEMPLATE.format(timeout=timeout, **package_versions), + config.root / "Dockerfile", + ), + _generate_hassfest_dockerimage(config, timeout, package_versions), + ] + def validate(integrations: dict[str, Integration], config: Config) -> None: """Validate dockerfile.""" - dockerfile_content = _generate_dockerfile() - config.cache["dockerfile"] = dockerfile_content + docker_files = _generate_files(config) + config.cache["docker"] = docker_files - dockerfile_path = config.root / "Dockerfile" - if dockerfile_path.read_text() != dockerfile_content: - config.add_error( - "docker", - "File Dockerfile is not up to date. 
Run python3 -m script.hassfest", - fixable=True, - ) + for file in docker_files: + if file.content != file.path.read_text(): + config.add_error( + "docker", + f"File {file.path} is not up to date. Run python3 -m script.hassfest", + fixable=True, + ) def generate(integrations: dict[str, Integration], config: Config) -> None: """Generate dockerfile.""" - dockerfile_path = config.root / "Dockerfile" - dockerfile_path.write_text(config.cache["dockerfile"]) + for file in _generate_files(config): + file.path.write_text(file.content) diff --git a/script/hassfest/docker/Dockerfile b/script/hassfest/docker/Dockerfile new file mode 100644 index 00000000000..fc96653604e --- /dev/null +++ b/script/hassfest/docker/Dockerfile @@ -0,0 +1,37 @@ +# Automatically generated by hassfest. +# +# To update, run python3 -m script.hassfest -p docker +FROM python:alpine3.20 + +ENV \ + UV_SYSTEM_PYTHON=true \ + UV_EXTRA_INDEX_URL="https://wheels.home-assistant.io/musllinux-index/" + +SHELL ["/bin/sh", "-o", "pipefail", "-c"] +ENTRYPOINT ["/usr/src/homeassistant/script/hassfest/docker/entrypoint.sh"] +WORKDIR "/github/workspace" + +# Install uv +COPY --from=ghcr.io/astral-sh/uv:0.2.27 /uv /bin/uv + +COPY . /usr/src/homeassistant + +RUN \ + # Required for PyTurboJPEG + apk add --no-cache libturbojpeg \ + && cd /usr/src/homeassistant \ + && uv pip install \ + --no-build \ + --no-cache \ + -c homeassistant/package_constraints.txt \ + -r requirements.txt \ + stdlib-list==0.10.0 pipdeptree==2.23.1 tqdm==4.66.4 ruff==0.6.2 \ + PyTurboJPEG==1.7.5 ha-ffmpeg==3.2.0 hassil==1.7.4 home-assistant-intents==2024.9.4 mutagen==1.47.0 + +LABEL "name"="hassfest" +LABEL "maintainer"="Home Assistant " + +LABEL "com.github.actions.name"="hassfest" +LABEL "com.github.actions.description"="Run hassfest to validate standalone integration repositories" +LABEL "com.github.actions.icon"="terminal" +LABEL "com.github.actions.color"="gray-dark" diff --git a/script/hassfest/docker/Dockerfile.dockerignore b/script/hassfest/docker/Dockerfile.dockerignore new file mode 100644 index 00000000000..75ed4f0e5d3 --- /dev/null +++ b/script/hassfest/docker/Dockerfile.dockerignore @@ -0,0 +1,8 @@ +# Ignore everything except the specified files +* + +!homeassistant/ +!requirements.txt +!script/ +script/hassfest/docker/ +!script/hassfest/docker/entrypoint.sh diff --git a/script/hassfest/docker/entrypoint.sh b/script/hassfest/docker/entrypoint.sh new file mode 100755 index 00000000000..7b75eb186d2 --- /dev/null +++ b/script/hassfest/docker/entrypoint.sh @@ -0,0 +1,18 @@ +#!/bin/sh + +integrations="" +integration_path="" + +# Enable recursive globbing using find +for manifest in $(find . -name "manifest.json"); do + manifest_path=$(realpath "${manifest}") + integrations="$integrations --integration-path ${manifest_path%/*}" +done + +if [ -z "$integrations" ]; then + echo "Error: No integrations found!" 
+ exit 1 +fi + +cd /usr/src/homeassistant || exit 1 +exec python3 -m script.hassfest --action validate $integrations "$@" diff --git a/script/hassfest/icons.py b/script/hassfest/icons.py index 087d395afeb..f6bcd865c23 100644 --- a/script/hassfest/icons.py +++ b/script/hassfest/icons.py @@ -9,6 +9,7 @@ import voluptuous as vol from voluptuous.humanize import humanize_error import homeassistant.helpers.config_validation as cv +from homeassistant.helpers.icon import convert_shorthand_service_icon from .model import Config, Integration from .translations import translation_key_validator @@ -51,7 +52,7 @@ DATA_ENTRY_ICONS_SCHEMA = vol.Schema( { "step": { str: { - "section": { + "sections": { str: icon_value_validator, } } @@ -60,7 +61,38 @@ DATA_ENTRY_ICONS_SCHEMA = vol.Schema( ) -def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema: +CORE_SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys( + vol.Schema( + { + vol.Optional("service"): icon_value_validator, + vol.Optional("sections"): cv.schema_with_slug_keys( + icon_value_validator, slug_validator=translation_key_validator + ), + } + ), + slug_validator=translation_key_validator, +) + + +CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA = cv.schema_with_slug_keys( + vol.All( + convert_shorthand_service_icon, + vol.Schema( + { + vol.Optional("service"): icon_value_validator, + vol.Optional("sections"): cv.schema_with_slug_keys( + icon_value_validator, slug_validator=translation_key_validator + ), + } + ), + ), + slug_validator=translation_key_validator, +) + + +def icon_schema( + core_integration: bool, integration_type: str, no_entity_platform: bool +) -> vol.Schema: """Create an icon schema.""" state_validator = cv.schema_with_slug_keys( @@ -91,7 +123,9 @@ def icon_schema(integration_type: str, no_entity_platform: bool) -> vol.Schema: {str: {"fix_flow": DATA_ENTRY_ICONS_SCHEMA}} ), vol.Optional("options"): DATA_ENTRY_ICONS_SCHEMA, - vol.Optional("services"): state_validator, + vol.Optional("services"): CORE_SERVICE_ICONS_SCHEMA + if core_integration + else CUSTOM_INTEGRATION_SERVICE_ICONS_SCHEMA, } ) @@ -146,7 +180,9 @@ def validate_icon_file(config: Config, integration: Integration) -> None: no_entity_platform = integration.domain in ("notify", "image_processing") - schema = icon_schema(integration.integration_type, no_entity_platform) + schema = icon_schema( + integration.core, integration.integration_type, no_entity_platform + ) try: schema(icons) diff --git a/script/hassfest/model.py b/script/hassfest/model.py index 736fb6874be..63e9b025ed4 100644 --- a/script/hassfest/model.py +++ b/script/hassfest/model.py @@ -29,6 +29,7 @@ class Config: root: pathlib.Path action: Literal["validate", "generate"] requirements: bool + core_integrations_path: pathlib.Path errors: list[Error] = field(default_factory=list) cache: dict[str, Any] = field(default_factory=dict) plugins: set[str] = field(default_factory=set) @@ -105,7 +106,7 @@ class Integration: """Represent an integration in our validator.""" @classmethod - def load_dir(cls, path: pathlib.Path) -> dict[str, Integration]: + def load_dir(cls, path: pathlib.Path, config: Config) -> dict[str, Integration]: """Load all integrations in a directory.""" assert path.is_dir() integrations: dict[str, Integration] = {} @@ -123,13 +124,14 @@ class Integration: ) continue - integration = cls(fil) + integration = cls(fil, config) integration.load_manifest() integrations[integration.domain] = integration return integrations path: pathlib.Path + _config: Config _manifest: dict[str, Any] | None = 
None manifest_path: pathlib.Path | None = None errors: list[Error] = field(default_factory=list) @@ -150,7 +152,9 @@ class Integration: @property def core(self) -> bool: """Core integration.""" - return self.path.as_posix().startswith("homeassistant/components") + return self.path.as_posix().startswith( + self._config.core_integrations_path.as_posix() + ) @property def disabled(self) -> str | None: diff --git a/script/hassfest/translations.py b/script/hassfest/translations.py index c39c070eba2..fa12ce626ad 100644 --- a/script/hassfest/translations.py +++ b/script/hassfest/translations.py @@ -41,6 +41,7 @@ ALLOW_NAME_TRANSLATION = { "local_todo", "nmap_tracker", "rpi_power", + "swiss_public_transport", "waze_travel_time", "zodiac", } @@ -166,7 +167,7 @@ def gen_data_entry_schema( vol.Optional("data_description"): {str: translation_value_validator}, vol.Optional("menu_options"): {str: translation_value_validator}, vol.Optional("submit"): translation_value_validator, - vol.Optional("section"): { + vol.Optional("sections"): { str: { vol.Optional("data"): {str: translation_value_validator}, vol.Optional("description"): translation_value_validator, diff --git a/script/licenses.py b/script/licenses.py index 9c584e7f4fc..ac9a836396c 100644 --- a/script/licenses.py +++ b/script/licenses.py @@ -116,6 +116,7 @@ OSI_APPROVED_LICENSES = { "Unlicense", "Apache-2", "GPLv2", + "Python-2.0.1", } EXCEPTIONS = { @@ -128,7 +129,6 @@ EXCEPTIONS = { "aiooui", # https://github.com/Bluetooth-Devices/aiooui/pull/8 "aioruuvigateway", # https://github.com/akx/aioruuvigateway/pull/6 "aiovodafone", # https://github.com/chemelli74/aiovodafone/pull/131 - "airthings-ble", # https://github.com/Airthings/airthings-ble/pull/42 "apple_weatherkit", # https://github.com/tjhorner/python-weatherkit/pull/3 "asyncio", # PSF License "chacha20poly1305", # LGPL @@ -138,7 +138,6 @@ EXCEPTIONS = { "crownstone-core", # https://github.com/crownstone/crownstone-lib-python-core/pull/6 "crownstone-sse", # https://github.com/crownstone/crownstone-lib-python-sse/pull/2 "crownstone-uart", # https://github.com/crownstone/crownstone-lib-python-uart/pull/12 - "dio-chacon-wifi-api", "eliqonline", # https://github.com/molobrakos/eliqonline/pull/17 "enocean", # https://github.com/kipe/enocean/pull/142 "gardena-bluetooth", # https://github.com/elupus/gardena-bluetooth/pull/11 @@ -154,9 +153,7 @@ EXCEPTIONS = { "neurio", # https://github.com/jordanh/neurio-python/pull/13 "nsw-fuel-api-client", # https://github.com/nickw444/nsw-fuel-api-client/pull/14 "pigpio", # https://github.com/joan2937/pigpio/pull/608 - "pyEmby", # https://github.com/mezz64/pyEmby/pull/12 "pymitv", # MIT - "pyTibber", # https://github.com/Danielhiversen/pyTibber/pull/294 "pybbox", # https://github.com/HydrelioxGitHub/pybbox/pull/5 "pyeconet", # https://github.com/w1ll1am23/pyeconet/pull/41 "pysabnzbd", # https://github.com/jeradM/pysabnzbd/pull/6 @@ -178,13 +175,6 @@ TODO = { "aiocache": AwesomeVersion( "0.12.2" ), # https://github.com/aio-libs/aiocache/blob/master/LICENSE all rights reserved? 
- "asterisk_mbox": AwesomeVersion( - "0.5.0" - ), # No license, integration is deprecated and scheduled for removal in 2024.9.0 - "pyflic": AwesomeVersion("2.0.3"), # No OSI approved license CC0-1.0 Universal) - "uvcclient": AwesomeVersion( - "0.11.0" - ), # No License https://github.com/kk7ds/uvcclient/issues/7 } @@ -204,7 +194,7 @@ def main() -> int: if previous_unapproved_version < package.version: if approved: print( - "Approved license detected for" + "Approved license detected for " f"{package.name}@{package.version}: {package.license}" ) print("Please remove the package from the TODO list.") @@ -218,14 +208,14 @@ def main() -> int: exit_code = 1 elif not approved and package.name not in EXCEPTIONS: print( - "We could not detect an OSI-approved license for" + "We could not detect an OSI-approved license for " f"{package.name}@{package.version}: {package.license}" ) print() exit_code = 1 elif approved and package.name in EXCEPTIONS: print( - "Approved license detected for" + "Approved license detected for " f"{package.name}@{package.version}: {package.license}" ) print(f"Please remove the package from the EXCEPTIONS list: {package.name}") diff --git a/script/lint_and_test.py b/script/lint_and_test.py index e23870364b6..ff3db8aa1ed 100755 --- a/script/lint_and_test.py +++ b/script/lint_and_test.py @@ -20,7 +20,7 @@ except ImportError: RE_ASCII = re.compile(r"\033\[[^m]*m") -Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"]) +Error = namedtuple("Error", ["file", "line", "col", "msg", "skip"]) # noqa: PYI024 PASS = "green" FAIL = "bold_red" diff --git a/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py b/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py index 809902fa0dd..8e7854835d8 100644 --- a/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py +++ b/script/scaffold/templates/config_flow_helper/tests/test_config_flow.py @@ -59,7 +59,7 @@ def get_suggested(schema, key): return None return k.description["suggested_value"] # Wanted key absent from schema - raise Exception + raise KeyError(f"Key `{key}` is missing from schema") @pytest.mark.parametrize("platform", ["sensor"]) diff --git a/tests/common.py b/tests/common.py index 64e11ee7b51..c2d561551ca 100644 --- a/tests/common.py +++ b/tests/common.py @@ -23,7 +23,6 @@ import json import logging import os import pathlib -import threading import time from types import FrameType, ModuleType from typing import Any, Literal, NoReturn @@ -47,8 +46,8 @@ from homeassistant.components import device_automation, persistent_notification from homeassistant.components.device_automation import ( # noqa: F401 _async_get_device_automation_capabilities as async_get_device_automation_capabilities, ) -from homeassistant.config import async_process_component_config -from homeassistant.config_entries import ConfigEntry, ConfigFlow +from homeassistant.config import IntegrationConfigInfo, async_process_component_config +from homeassistant.config_entries import ConfigEntry, ConfigFlow, ConfigFlowResult from homeassistant.const import ( DEVICE_DEFAULT_NAME, EVENT_HOMEASSISTANT_CLOSE, @@ -93,6 +92,7 @@ from homeassistant.helpers.json import JSONEncoder, _orjson_default_encoder, jso from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.util.async_ import ( _SHUTDOWN_RUN_CALLBACK_THREADSAFE, + get_scheduled_timer_handles, run_callback_threadsafe, ) import homeassistant.util.dt as dt_util @@ -173,48 +173,6 @@ def get_test_config_dir(*add_path): return 
os.path.join(os.path.dirname(__file__), "testing_config", *add_path) -@contextmanager -def get_test_home_assistant() -> Generator[HomeAssistant]: - """Return a Home Assistant object pointing at test config directory.""" - loop = asyncio.new_event_loop() - asyncio.set_event_loop(loop) - context_manager = async_test_home_assistant(loop) - hass = loop.run_until_complete(context_manager.__aenter__()) - - loop_stop_event = threading.Event() - - def run_loop() -> None: - """Run event loop.""" - - loop._thread_ident = threading.get_ident() - hass.loop_thread_id = loop._thread_ident - loop.run_forever() - loop_stop_event.set() - - orig_stop = hass.stop - hass._stopped = Mock(set=loop.stop) - - def start_hass(*mocks: Any) -> None: - """Start hass.""" - asyncio.run_coroutine_threadsafe(hass.async_start(), loop).result() - - def stop_hass() -> None: - """Stop hass.""" - orig_stop() - loop_stop_event.wait() - - hass.start = start_hass - hass.stop = stop_hass - - threading.Thread(name="LoopThread", target=run_loop, daemon=False).start() - - try: - yield hass - finally: - loop.run_until_complete(context_manager.__aexit__(None, None, None)) - loop.close() - - class StoreWithoutWriteLoad[_T: (Mapping[str, Any] | Sequence[Any])](storage.Store[_T]): """Fake store that does not write or load. Used for testing.""" @@ -237,6 +195,7 @@ async def async_test_home_assistant( event_loop: asyncio.AbstractEventLoop | None = None, load_registries: bool = True, config_dir: str | None = None, + initial_state: CoreState = CoreState.running, ) -> AsyncGenerator[HomeAssistant]: """Return a Home Assistant object pointing at test config dir.""" hass = HomeAssistant(config_dir or get_test_config_dir()) @@ -364,7 +323,7 @@ async def async_test_home_assistant( await rs.async_load(hass) hass.data[bootstrap.DATA_REGISTRIES_LOADED] = None - hass.set_state(CoreState.running) + hass.set_state(initial_state) @callback def clear_instance(event): @@ -425,14 +384,16 @@ mock_service = threadsafe_callback_factory(async_mock_service) @callback -def async_mock_intent(hass, intent_typ): +def async_mock_intent(hass: HomeAssistant, intent_typ: str) -> list[intent.Intent]: """Set up a fake intent handler.""" - intents = [] + intents: list[intent.Intent] = [] class MockIntentHandler(intent.IntentHandler): intent_type = intent_typ - async def async_handle(self, intent_obj): + async def async_handle( + self, intent_obj: intent.Intent + ) -> intent.IntentResponse: """Handle the intent.""" intents.append(intent_obj) return intent_obj.create_response() @@ -531,7 +492,7 @@ def _async_fire_time_changed( hass: HomeAssistant, utc_datetime: datetime | None, fire_all: bool ) -> None: timestamp = dt_util.utc_to_timestamp(utc_datetime) - for task in list(hass.loop._scheduled): + for task in list(get_scheduled_timer_handles(hass.loop)): if not isinstance(task, asyncio.TimerHandle): continue if task.cancelled(): @@ -1093,6 +1054,25 @@ class MockConfigEntry(config_entries.ConfigEntry): """ self._async_set_state(hass, state, reason) + async def start_reauth_flow( + self, + hass: HomeAssistant, + context: dict[str, Any] | None = None, + data: dict[str, Any] | None = None, + ) -> ConfigFlowResult: + """Start a reauthentication flow.""" + return await hass.config_entries.flow.async_init( + self.domain, + context={ + "source": config_entries.SOURCE_REAUTH, + "entry_id": self.entry_id, + "title_placeholders": {"name": self.title}, + "unique_id": self.unique_id, + } + | (context or {}), + data=self.data | (data or {}), + ) + def patch_yaml_files(files_dict, 
endswith=True): """Patch load_yaml with a dictionary of yaml files.""" @@ -1145,7 +1125,12 @@ def assert_setup_component(count, domain=None): """ config = {} - async def mock_psc(hass, config_input, integration, component=None): + async def mock_psc( + hass: HomeAssistant, + config_input: ConfigType, + integration: loader.Integration, + component: loader.ComponentProtocol | None = None, + ) -> IntegrationConfigInfo: """Mock the prepare_setup_component to capture config.""" domain_input = integration.domain integration_config_info = await async_process_component_config( diff --git a/tests/components/abode/test_config_flow.py b/tests/components/abode/test_config_flow.py index 265a77560f7..a37fb8cbe33 100644 --- a/tests/components/abode/test_config_flow.py +++ b/tests/components/abode/test_config_flow.py @@ -12,7 +12,7 @@ from requests.exceptions import ConnectTimeout from homeassistant.components.abode import config_flow from homeassistant.components.abode.const import CONF_POLLING, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -161,18 +161,15 @@ async def test_step_reauth(hass: HomeAssistant) -> None: """Test the reauth flow.""" conf = {CONF_USERNAME: "user@email.com", CONF_PASSWORD: "password"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id="user@email.com", data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) with patch("homeassistant.components.abode.config_flow.Abode"): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=conf, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/accuweather/snapshots/test_weather.ambr b/tests/components/accuweather/snapshots/test_weather.ambr index 49bf4008884..cbe1891d216 100644 --- a/tests/components/accuweather/snapshots/test_weather.ambr +++ b/tests/components/accuweather/snapshots/test_weather.ambr @@ -1,85 +1,4 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 58, - 'condition': 'lightning-rainy', - 'datetime': '2020-07-26T05:00:00+00:00', - 'humidity': 60, - 'precipitation': 2.5, - 'precipitation_probability': 60, - 'temperature': 29.5, - 'templow': 15.4, - 'uv_index': 5, - 'wind_bearing': 166, - 'wind_gust_speed': 29.6, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 52, - 'condition': 'partlycloudy', - 'datetime': '2020-07-27T05:00:00+00:00', - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 26.2, - 'templow': 15.9, - 'uv_index': 7, - 'wind_bearing': 297, - 'wind_gust_speed': 14.8, - 'wind_speed': 9.3, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 65, - 'condition': 'partlycloudy', - 'datetime': '2020-07-28T05:00:00+00:00', - 'humidity': 52, - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 31.7, - 'templow': 16.8, - 'uv_index': 7, - 'wind_bearing': 198, - 'wind_gust_speed': 24.1, - 'wind_speed': 16.7, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 45, - 'condition': 'partlycloudy', - 'datetime': '2020-07-29T05:00:00+00:00', - 
'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 9, - 'temperature': 24.0, - 'templow': 11.7, - 'uv_index': 6, - 'wind_bearing': 293, - 'wind_gust_speed': 24.1, - 'wind_speed': 13.0, - }), - dict({ - 'apparent_temperature': 22.2, - 'cloud_coverage': 50, - 'condition': 'partlycloudy', - 'datetime': '2020-07-30T05:00:00+00:00', - 'humidity': 55, - 'precipitation': 0.0, - 'precipitation_probability': 1, - 'temperature': 21.4, - 'templow': 12.2, - 'uv_index': 7, - 'wind_bearing': 280, - 'wind_gust_speed': 27.8, - 'wind_speed': 18.5, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.home': dict({ diff --git a/tests/components/advantage_air/__init__.py b/tests/components/advantage_air/__init__.py index 05d98e957bb..5587c668c7e 100644 --- a/tests/components/advantage_air/__init__.py +++ b/tests/components/advantage_air/__init__.py @@ -4,6 +4,7 @@ from unittest.mock import AsyncMock, patch from homeassistant.components.advantage_air.const import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PORT +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_json_object_fixture @@ -43,7 +44,7 @@ def patch_update(return_value=True, side_effect=None): ) -async def add_mock_config(hass): +async def add_mock_config(hass: HomeAssistant) -> MockConfigEntry: """Create a fake Advantage Air Config Entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/aemet/snapshots/test_weather.ambr b/tests/components/aemet/snapshots/test_weather.ambr index f19f95a6e80..58c854dcda9 100644 --- a/tests/components/aemet/snapshots/test_weather.ambr +++ b/tests/components/aemet/snapshots/test_weather.ambr @@ -1,494 +1,4 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-08T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 2.0, - 'templow': -1.0, - 'wind_bearing': 90.0, - 'wind_speed': 0.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-09T23:00:00+00:00', - 'precipitation_probability': 30, - 'temperature': 4.0, - 'templow': -4.0, - 'wind_bearing': 45.0, - 'wind_speed': 20.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 3.0, - 'templow': -7.0, - 'wind_bearing': 0.0, - 'wind_speed': 5.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-11T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': -1.0, - 'templow': -13.0, - 'wind_bearing': None, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-01-12T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 6.0, - 'templow': -11.0, - 'wind_bearing': None, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-13T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 6.0, - 'templow': -7.0, - 'wind_bearing': None, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-14T23:00:00+00:00', - 'precipitation_probability': 0, - 'temperature': 5.0, - 'templow': -4.0, - 'wind_bearing': None, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T12:00:00+00:00', - 'precipitation': 2.7, - 'precipitation_probability': 100, - 'temperature': 0.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 22.0, - 'wind_speed': 15.0, - }), - dict({ - 
'condition': 'rainy', - 'datetime': '2021-01-09T13:00:00+00:00', - 'precipitation': 0.6, - 'precipitation_probability': 100, - 'temperature': 0.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 14.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T14:00:00+00:00', - 'precipitation': 0.8, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 20.0, - 'wind_speed': 10.0, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T15:00:00+00:00', - 'precipitation': 1.4, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 14.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T16:00:00+00:00', - 'precipitation': 1.2, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 13.0, - 'wind_speed': 9.0, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-01-09T17:00:00+00:00', - 'precipitation': 0.4, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 90.0, - 'wind_gust_speed': 13.0, - 'wind_speed': 7.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T18:00:00+00:00', - 'precipitation': 0.3, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-01-09T19:00:00+00:00', - 'precipitation': 0.1, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 135.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-09T20:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 90.0, - 'wind_gust_speed': 8.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-09T21:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 9.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-09T22:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 100, - 'temperature': 1.0, - 'wind_bearing': 90.0, - 'wind_gust_speed': 11.0, - 'wind_speed': 8.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-09T23:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'fog', - 'datetime': '2021-01-10T00:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 10.0, - 'wind_speed': 5.0, - }), - dict({ - 'condition': 'fog', - 'datetime': '2021-01-10T01:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 0.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 11.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'fog', - 'datetime': '2021-01-10T02:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 0.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 9.0, - 'wind_speed': 6.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T03:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 12.0, - 'wind_speed': 8.0, - }), - dict({ - 
'condition': 'cloudy', - 'datetime': '2021-01-10T04:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': -1.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 11.0, - 'wind_speed': 5.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T05:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -1.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 13.0, - 'wind_speed': 9.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T06:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 18.0, - 'wind_speed': 13.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T07:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T08:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 31.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T09:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 32.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 15, - 'temperature': 2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 32.0, - 'wind_speed': 22.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T12:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 32.0, - 'wind_speed': 20.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T13:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 19.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T14:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 4.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 28.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T15:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T16:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 5, - 'temperature': 2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T17:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-10T18:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 17.0, 
- }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T19:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 25.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T20:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 24.0, - 'wind_speed': 17.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T21:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 27.0, - 'wind_speed': 19.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T22:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 0.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 21.0, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-01-10T23:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 30.0, - 'wind_speed': 19.0, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-01-11T00:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -1.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 27.0, - 'wind_speed': 16.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T01:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 22.0, - 'wind_speed': 12.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T02:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -2.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 17.0, - 'wind_speed': 10.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T03:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -3.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 15.0, - 'wind_speed': 11.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T04:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': -4.0, - 'wind_bearing': 45.0, - 'wind_gust_speed': 15.0, - 'wind_speed': 10.0, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-01-11T05:00:00+00:00', - 'precipitation_probability': None, - 'temperature': -4.0, - 'wind_bearing': 0.0, - 'wind_gust_speed': 15.0, - 'wind_speed': 10.0, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.aemet': dict({ diff --git a/tests/components/airgradient/conftest.py b/tests/components/airgradient/conftest.py index a6ee85ecbdd..1899e12c8ae 100644 --- a/tests/components/airgradient/conftest.py +++ b/tests/components/airgradient/conftest.py @@ -44,6 +44,7 @@ def mock_airgradient_client() -> Generator[AsyncMock]: client.get_config.return_value = Config.from_json( load_fixture("get_config_local.json", DOMAIN) ) + client.get_latest_firmware_version.return_value = "3.1.4" yield client diff --git a/tests/components/airgradient/snapshots/test_update.ambr b/tests/components/airgradient/snapshots/test_update.ambr new file mode 100644 index 00000000000..c639a97d5dd --- /dev/null +++ b/tests/components/airgradient/snapshots/test_update.ambr @@ -0,0 +1,58 @@ +# serializer version: 1 +# name: test_all_entities[update.airgradient_firmware-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.airgradient_firmware', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Firmware', + 'platform': 'airgradient', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '84fce612f5b8-update', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[update.airgradient_firmware-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'device_class': 'firmware', + 'entity_picture': 'https://brands.home-assistant.io/_/airgradient/icon.png', + 'friendly_name': 'Airgradient Firmware', + 'in_progress': False, + 'installed_version': '3.1.1', + 'latest_version': '3.1.4', + 'release_summary': None, + 'release_url': None, + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.airgradient_firmware', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/airgradient/test_update.py b/tests/components/airgradient/test_update.py new file mode 100644 index 00000000000..020a9a82a71 --- /dev/null +++ b/tests/components/airgradient/test_update.py @@ -0,0 +1,69 @@ +"""Tests for the AirGradient update platform.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, patch + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch("homeassistant.components.airgradient.PLATFORMS", [Platform.UPDATE]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_update_mechanism( + hass: HomeAssistant, + mock_airgradient_client: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test update entity.""" + await setup_integration(hass, mock_config_entry) + + state = hass.states.get("update.airgradient_firmware") + assert state.state == STATE_ON + assert state.attributes["installed_version"] == "3.1.1" + assert state.attributes["latest_version"] == "3.1.4" + mock_airgradient_client.get_latest_firmware_version.assert_called_once() + mock_airgradient_client.get_latest_firmware_version.reset_mock() + + mock_airgradient_client.get_current_measures.return_value.firmware_version = "3.1.4" + + freezer.tick(timedelta(minutes=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + state = hass.states.get("update.airgradient_firmware") + assert state.state == STATE_OFF + assert state.attributes["installed_version"] == "3.1.4" + assert state.attributes["latest_version"] == "3.1.4" + + mock_airgradient_client.get_latest_firmware_version.return_value = "3.1.5" + + freezer.tick(timedelta(minutes=59)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + mock_airgradient_client.get_latest_firmware_version.assert_called_once() + state = hass.states.get("update.airgradient_firmware") + assert state.state == STATE_ON + assert state.attributes["installed_version"] == "3.1.4" + assert state.attributes["latest_version"] == "3.1.5" diff --git a/tests/components/airthings_ble/__init__.py b/tests/components/airthings_ble/__init__.py index 45521903a08..a736fa979e9 100644 --- a/tests/components/airthings_ble/__init__.py +++ b/tests/components/airthings_ble/__init__.py @@ -13,6 +13,7 @@ from airthings_ble import ( from homeassistant.components.airthings_ble.const import DOMAIN from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant from homeassistant.helpers.device_registry import CONNECTION_BLUETOOTH, DeviceRegistry from tests.common import MockConfigEntry, MockEntity @@ -225,7 +226,7 @@ VOC_V3 = MockEntity( ) -def create_entry(hass): +def create_entry(hass: HomeAssistant) -> MockConfigEntry: """Create a config entry.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/airvisual/test_config_flow.py b/tests/components/airvisual/test_config_flow.py index b9643b17c07..e38fc64587e 100644 --- a/tests/components/airvisual/test_config_flow.py +++ b/tests/components/airvisual/test_config_flow.py @@ -18,7 +18,7 @@ from homeassistant.components.airvisual import ( INTEGRATION_TYPE_GEOGRAPHY_COORDS, INTEGRATION_TYPE_GEOGRAPHY_NAME, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SHOW_ON_MAP from homeassistant.core import HomeAssistant from 
homeassistant.data_entry_flow import FlowResultType @@ -33,6 +33,8 @@ from .conftest import ( TEST_STATE, ) +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -146,12 +148,10 @@ async def test_options_flow( async def test_step_reauth( - hass: HomeAssistant, config_entry, setup_config_entry + hass: HomeAssistant, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=config_entry.data - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/airvisual_pro/test_config_flow.py b/tests/components/airvisual_pro/test_config_flow.py index 803a335f52c..9298b8cf528 100644 --- a/tests/components/airvisual_pro/test_config_flow.py +++ b/tests/components/airvisual_pro/test_config_flow.py @@ -10,11 +10,13 @@ from pyairvisual.node import ( import pytest from homeassistant.components.airvisual_pro.const import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -98,22 +100,14 @@ async def test_step_import(hass: HomeAssistant, config, setup_airvisual_pro) -> async def test_reauth( hass: HomeAssistant, config, - config_entry, + config_entry: MockConfigEntry, connect_errors, connect_mock, pro, setup_airvisual_pro, ) -> None: """Test re-auth (including errors).""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data=config, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/airzone/test_select.py b/tests/components/airzone/test_select.py index 01617eab175..343c033728a 100644 --- a/tests/components/airzone/test_select.py +++ b/tests/components/airzone/test_select.py @@ -2,17 +2,19 @@ from unittest.mock import patch +from aioairzone.common import OperationMode from aioairzone.const import ( API_COLD_ANGLE, API_DATA, API_HEAT_ANGLE, + API_MODE, API_SLEEP, API_SYSTEM_ID, API_ZONE_ID, ) import pytest -from homeassistant.components.select import DOMAIN as SELECT_DOMAIN +from homeassistant.components.select import ATTR_OPTIONS, DOMAIN as SELECT_DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, SERVICE_SELECT_OPTION from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError @@ -31,6 +33,9 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.despacho_heat_angle") assert state.state == "90deg" + state = hass.states.get("select.despacho_mode") + assert state is None + state = hass.states.get("select.despacho_sleep") assert state.state == "off" @@ -40,6 +45,9 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dorm_1_heat_angle") assert state.state == "90deg" + state = hass.states.get("select.dorm_1_mode") + 
assert state is None + state = hass.states.get("select.dorm_1_sleep") assert state.state == "off" @@ -49,6 +57,9 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dorm_2_heat_angle") assert state.state == "90deg" + state = hass.states.get("select.dorm_2_mode") + assert state is None + state = hass.states.get("select.dorm_2_sleep") assert state.state == "off" @@ -58,6 +69,9 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.dorm_ppal_heat_angle") assert state.state == "50deg" + state = hass.states.get("select.dorm_ppal_mode") + assert state is None + state = hass.states.get("select.dorm_ppal_sleep") assert state.state == "30m" @@ -67,6 +81,16 @@ async def test_airzone_create_selects(hass: HomeAssistant) -> None: state = hass.states.get("select.salon_heat_angle") assert state.state == "90deg" + state = hass.states.get("select.salon_mode") + assert state.state == "heat" + assert state.attributes.get(ATTR_OPTIONS) == [ + "cool", + "dry", + "fan", + "heat", + "stop", + ] + state = hass.states.get("select.salon_sleep") assert state.state == "off" @@ -115,6 +139,50 @@ async def test_airzone_select_sleep(hass: HomeAssistant) -> None: assert state.state == "30m" +async def test_airzone_select_mode(hass: HomeAssistant) -> None: + """Test select HVAC mode.""" + + await async_init_integration(hass) + + put_hvac_mode = { + API_DATA: [ + { + API_SYSTEM_ID: 1, + API_ZONE_ID: 1, + API_MODE: OperationMode.COOLING, + } + ] + } + + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.salon_mode", + ATTR_OPTION: "Invalid", + }, + blocking=True, + ) + + with patch( + "homeassistant.components.airzone.AirzoneLocalApi.put_hvac", + return_value=put_hvac_mode, + ): + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: "select.salon_mode", + ATTR_OPTION: "cool", + }, + blocking=True, + ) + + state = hass.states.get("select.salon_mode") + assert state.state == "cool" + + async def test_airzone_select_grille_angle(hass: HomeAssistant) -> None: """Test select sleep.""" diff --git a/tests/components/alarm_control_panel/common.py b/tests/components/alarm_control_panel/common.py index 9ec419d8cf0..36e9918f54c 100644 --- a/tests/components/alarm_control_panel/common.py +++ b/tests/components/alarm_control_panel/common.py @@ -27,11 +27,14 @@ from homeassistant.const import ( STATE_ALARM_DISARMED, STATE_ALARM_TRIGGERED, ) +from homeassistant.core import HomeAssistant from tests.common import MockEntity -async def async_alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_disarm( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -42,7 +45,9 @@ async def async_alarm_disarm(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_DISARM, data, blocking=True) -async def async_alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_home( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -53,7 +58,9 @@ async def async_alarm_arm_home(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_HOME, data, blocking=True) -async 
def async_alarm_arm_away(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_away( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -64,7 +71,9 @@ async def async_alarm_arm_away(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_AWAY, data, blocking=True) -async def async_alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_night( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -75,7 +84,9 @@ async def async_alarm_arm_night(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_ARM_NIGHT, data, blocking=True) -async def async_alarm_arm_vacation(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_vacation( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for vacation mode.""" data = {} if code: @@ -88,7 +99,9 @@ async def async_alarm_arm_vacation(hass, code=None, entity_id=ENTITY_MATCH_ALL): ) -async def async_alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_trigger( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: @@ -99,7 +112,9 @@ async def async_alarm_trigger(hass, code=None, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_ALARM_TRIGGER, data, blocking=True) -async def async_alarm_arm_custom_bypass(hass, code=None, entity_id=ENTITY_MATCH_ALL): +async def async_alarm_arm_custom_bypass( + hass: HomeAssistant, code: str | None = None, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the alarm the command for disarm.""" data = {} if code: diff --git a/tests/components/alexa/test_auth.py b/tests/components/alexa/test_auth.py index 8d4308ba792..b3aa645bfcb 100644 --- a/tests/components/alexa/test_auth.py +++ b/tests/components/alexa/test_auth.py @@ -10,14 +10,14 @@ from tests.test_util.aiohttp import AiohttpClientMocker async def run_auth_get_access_token( - hass, - aioclient_mock, - expires_in, - client_id, - client_secret, - accept_grant_code, - refresh_token, -): + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + expires_in: int, + client_id: str, + client_secret: str, + accept_grant_code: str, + refresh_token: str, +) -> None: """Do auth and request a new token for tests.""" aioclient_mock.post( TEST_TOKEN_URL, diff --git a/tests/components/alexa/test_common.py b/tests/components/alexa/test_common.py index 9fdcc1c89c1..e78f2cba40f 100644 --- a/tests/components/alexa/test_common.py +++ b/tests/components/alexa/test_common.py @@ -1,5 +1,8 @@ """Test helpers for the Alexa integration.""" +from __future__ import annotations + +from typing import Any from unittest.mock import Mock from uuid import uuid4 @@ -7,7 +10,7 @@ import pytest from homeassistant.components.alexa import config, smart_home from homeassistant.components.alexa.const import CONF_ENDPOINT, CONF_FILTER, CONF_LOCALE -from homeassistant.core import Context, callback +from homeassistant.core import Context, HomeAssistant, ServiceCall, callback from homeassistant.helpers import entityfilter from tests.common import async_mock_service @@ -28,7 +31,7 @@ class 
MockConfig(smart_home.AlexaConfig): "camera.test": {"display_categories": "CAMERA"}, } - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: """Mock Alexa config.""" super().__init__( hass, @@ -62,7 +65,7 @@ class MockConfig(smart_home.AlexaConfig): """Accept a grant.""" -def get_default_config(hass): +def get_default_config(hass: HomeAssistant) -> MockConfig: """Return a MockConfig instance.""" return MockConfig(hass) @@ -93,15 +96,15 @@ def get_new_request(namespace, name, endpoint=None): async def assert_request_calls_service( - namespace, - name, - endpoint, - service, - hass, + namespace: str, + name: str, + endpoint: str, + service: str, + hass: HomeAssistant, response_type="Response", - payload=None, - instance=None, -): + payload: dict[str, Any] | None = None, + instance: str | None = None, +) -> tuple[ServiceCall, dict[str, Any]]: """Assert an API request calls a hass service.""" context = Context() request = get_new_request(namespace, name, endpoint) @@ -129,8 +132,14 @@ async def assert_request_calls_service( async def assert_request_fails( - namespace, name, endpoint, service_not_called, hass, payload=None, instance=None -): + namespace: str, + name: str, + endpoint: str, + service_not_called: str, + hass: HomeAssistant, + payload: dict[str, Any] | None = None, + instance: str | None = None, +) -> None: """Assert an API request returns an ErrorResponse.""" request = get_new_request(namespace, name, endpoint) if payload: @@ -152,8 +161,12 @@ async def assert_request_fails( async def assert_power_controller_works( - endpoint, on_service, off_service, hass, timestamp -): + endpoint: str, + on_service: str, + off_service: str, + hass: HomeAssistant, + timestamp: str, +) -> None: """Assert PowerController API requests work.""" _, response = await assert_request_calls_service( "Alexa.PowerController", "TurnOn", endpoint, on_service, hass @@ -169,8 +182,12 @@ async def assert_power_controller_works( async def assert_scene_controller_works( - endpoint, activate_service, deactivate_service, hass, timestamp -): + endpoint: str, + activate_service: str, + deactivate_service: str, + hass: HomeAssistant, + timestamp: str, +) -> None: """Assert SceneController API requests work.""" _, response = await assert_request_calls_service( "Alexa.SceneController", @@ -196,7 +213,9 @@ async def assert_scene_controller_works( assert response["event"]["payload"]["timestamp"] == timestamp -async def reported_properties(hass, endpoint, return_full_response=False): +async def reported_properties( + hass: HomeAssistant, endpoint: str, return_full_response: bool = False +) -> ReportedProperties: """Use ReportState to get properties and return them. 
The result is a ReportedProperties instance, which has methods to make @@ -213,7 +232,7 @@ async def reported_properties(hass, endpoint, return_full_response=False): class ReportedProperties: """Class to help assert reported properties.""" - def __init__(self, properties): + def __init__(self, properties) -> None: """Initialize class.""" self.properties = properties diff --git a/tests/components/alexa/test_smart_home.py b/tests/components/alexa/test_smart_home.py index fb27c91eea7..6ccf265dcdc 100644 --- a/tests/components/alexa/test_smart_home.py +++ b/tests/components/alexa/test_smart_home.py @@ -120,7 +120,9 @@ async def test_wrong_version(hass: HomeAssistant) -> None: await smart_home.async_handle_message(hass, get_default_config(hass), msg) -async def discovery_test(device, hass, expected_endpoints=1): +async def discovery_test( + device, hass: HomeAssistant, expected_endpoints: int = 1 +) -> dict[str, Any] | list[dict[str, Any]] | None: """Test alexa discovery request.""" request = get_new_request("Alexa.Discovery", "Discover") @@ -2601,8 +2603,15 @@ async def test_stop_valve( async def assert_percentage_changes( - hass, adjustments, namespace, name, endpoint, parameter, service, changed_parameter -): + hass: HomeAssistant, + adjustments, + namespace, + name, + endpoint, + parameter, + service, + changed_parameter, +) -> None: """Assert an API request making percentage changes works. AdjustPercentage, AdjustBrightness, etc. are examples of such requests. @@ -2616,8 +2625,15 @@ async def assert_percentage_changes( async def assert_range_changes( - hass, adjustments, namespace, name, endpoint, service, changed_parameter, instance -): + hass: HomeAssistant, + adjustments: list[tuple[int | str, int, bool]], + namespace: str, + name: str, + endpoint: str, + service: str, + changed_parameter: str | None, + instance: str, +) -> None: """Assert an API request making range changes works. AdjustRangeValue are examples of such requests. 
diff --git a/tests/components/alexa/test_smart_home_http.py b/tests/components/alexa/test_smart_home_http.py index 1c30c72e72c..20d9b30dda5 100644 --- a/tests/components/alexa/test_smart_home_http.py +++ b/tests/components/alexa/test_smart_home_http.py @@ -5,6 +5,7 @@ import json import logging from typing import Any +from aiohttp import ClientResponse import pytest from homeassistant.components.alexa import DOMAIN, smart_home @@ -17,7 +18,9 @@ from .test_common import get_new_request from tests.typing import ClientSessionGenerator -async def do_http_discovery(config, hass, hass_client): +async def do_http_discovery( + config: dict[str, Any], hass: HomeAssistant, hass_client: ClientSessionGenerator +) -> ClientResponse: """Submit a request to the Smart Home HTTP API.""" await async_setup_component(hass, DOMAIN, config) http_client = await hass_client() diff --git a/tests/components/ambient_station/test_config_flow.py b/tests/components/ambient_station/test_config_flow.py index 19ae9828c22..e4c8efabc20 100644 --- a/tests/components/ambient_station/test_config_flow.py +++ b/tests/components/ambient_station/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from aioambient.errors import AmbientError import pytest -from homeassistant.components.ambient_station import CONF_APP_KEY, DOMAIN +from homeassistant.components.ambient_station.const import CONF_APP_KEY, DOMAIN from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant diff --git a/tests/components/androidtv/patchers.py b/tests/components/androidtv/patchers.py index 1c32e1770e0..500b9e75cb3 100644 --- a/tests/components/androidtv/patchers.py +++ b/tests/components/androidtv/patchers.py @@ -1,5 +1,6 @@ """Define patches used for androidtv tests.""" +from typing import Any from unittest.mock import patch from androidtv.adb_manager.adb_manager_async import DeviceAsync @@ -25,7 +26,7 @@ PROPS_DEV_MAC = "ether ab:cd:ef:gh:ij:kl brd" class AdbDeviceTcpAsyncFake: """A fake of the `adb_shell.adb_device_async.AdbDeviceTcpAsync` class.""" - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: """Initialize a fake `adb_shell.adb_device_async.AdbDeviceTcpAsync` instance.""" self.available = False diff --git a/tests/components/androidtv/test_config_flow.py b/tests/components/androidtv/test_config_flow.py index e2b5207c590..b73fee9fb10 100644 --- a/tests/components/androidtv/test_config_flow.py +++ b/tests/components/androidtv/test_config_flow.py @@ -73,7 +73,7 @@ CONNECT_METHOD = ( class MockConfigDevice: """Mock class to emulate Android device.""" - def __init__(self, eth_mac=ETH_MAC, wifi_mac=None): + def __init__(self, eth_mac=ETH_MAC, wifi_mac=None) -> None: """Initialize a fake device to test config flow.""" self.available = True self.device_properties = {PROP_ETHMAC: eth_mac, PROP_WIFIMAC: wifi_mac} diff --git a/tests/components/anthropic/__init__.py b/tests/components/anthropic/__init__.py new file mode 100644 index 00000000000..99d7a5785a8 --- /dev/null +++ b/tests/components/anthropic/__init__.py @@ -0,0 +1 @@ +"""Tests for the Anthropic integration.""" diff --git a/tests/components/anthropic/conftest.py b/tests/components/anthropic/conftest.py new file mode 100644 index 00000000000..ce6b98c480c --- /dev/null +++ b/tests/components/anthropic/conftest.py @@ -0,0 +1,57 @@ +"""Tests helpers.""" + +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, patch + 
+import pytest + +from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.core import HomeAssistant +from homeassistant.helpers import llm +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.fixture +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: + """Mock a config entry.""" + entry = MockConfigEntry( + title="Claude", + domain="anthropic", + data={ + "api_key": "bla", + }, + ) + entry.add_to_hass(hass) + return entry + + +@pytest.fixture +def mock_config_entry_with_assist( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: + """Mock a config entry with assist.""" + hass.config_entries.async_update_entry( + mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} + ) + return mock_config_entry + + +@pytest.fixture +async def mock_init_component( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> AsyncGenerator[None]: + """Initialize integration.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ): + assert await async_setup_component(hass, "anthropic", {}) + await hass.async_block_till_done() + yield + + +@pytest.fixture(autouse=True) +async def setup_ha(hass: HomeAssistant) -> None: + """Set up Home Assistant.""" + assert await async_setup_component(hass, "homeassistant", {}) diff --git a/tests/components/anthropic/snapshots/test_conversation.ambr b/tests/components/anthropic/snapshots/test_conversation.ambr new file mode 100644 index 00000000000..e4dd7cd00bb --- /dev/null +++ b/tests/components/anthropic/snapshots/test_conversation.ambr @@ -0,0 +1,34 @@ +# serializer version: 1 +# name: test_unknown_hass_api + dict({ + 'conversation_id': None, + 'response': IntentResponse( + card=dict({ + }), + error_code=, + failed_results=list([ + ]), + intent=None, + intent_targets=list([ + ]), + language='en', + matched_states=list([ + ]), + reprompt=dict({ + }), + response_type=, + speech=dict({ + 'plain': dict({ + 'extra_data': None, + 'speech': 'Error preparing LLM API: API non-existing not found', + }), + }), + speech_slots=dict({ + }), + success_results=list([ + ]), + unmatched_states=list([ + ]), + ), + }) +# --- diff --git a/tests/components/anthropic/test_config_flow.py b/tests/components/anthropic/test_config_flow.py new file mode 100644 index 00000000000..df27352b7b2 --- /dev/null +++ b/tests/components/anthropic/test_config_flow.py @@ -0,0 +1,239 @@ +"""Test the Anthropic config flow.""" + +from unittest.mock import AsyncMock, patch + +from anthropic import ( + APIConnectionError, + APIResponseValidationError, + APITimeoutError, + AuthenticationError, + BadRequestError, + InternalServerError, +) +from httpx import URL, Request, Response +import pytest + +from homeassistant import config_entries +from homeassistant.components.anthropic.config_flow import RECOMMENDED_OPTIONS +from homeassistant.components.anthropic.const import ( + CONF_CHAT_MODEL, + CONF_MAX_TOKENS, + CONF_PROMPT, + CONF_RECOMMENDED, + CONF_TEMPERATURE, + DOMAIN, + RECOMMENDED_CHAT_MODEL, + RECOMMENDED_MAX_TOKENS, +) +from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form(hass: HomeAssistant) -> None: + """Test we get the form.""" + # Pretend we already set up a config entry. 
+ hass.config.components.add("anthropic") + MockConfigEntry( + domain=DOMAIN, + state=config_entries.ConfigEntryState.LOADED, + ).add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] is None + + with ( + patch( + "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + ), + patch( + "homeassistant.components.anthropic.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "api_key": "bla", + }, + ) + await hass.async_block_till_done() + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["data"] == { + "api_key": "bla", + } + assert result2["options"] == RECOMMENDED_OPTIONS + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_options( + hass: HomeAssistant, mock_config_entry, mock_init_component +) -> None: + """Test the options form.""" + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + "prompt": "Speak like a pirate", + "max_tokens": 200, + }, + ) + await hass.async_block_till_done() + assert options["type"] is FlowResultType.CREATE_ENTRY + assert options["data"]["prompt"] == "Speak like a pirate" + assert options["data"]["max_tokens"] == 200 + assert options["data"][CONF_CHAT_MODEL] == RECOMMENDED_CHAT_MODEL + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (APIConnectionError(request=None), "cannot_connect"), + (APITimeoutError(request=None), "timeout_connect"), + ( + BadRequestError( + message="Your credit balance is too low to access the Claude API. 
Please go to Plans & Billing to upgrade or purchase credits.", + response=Response( + status_code=400, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "invalid_request_error"}}, + ), + "invalid_request_error", + ), + ( + AuthenticationError( + message="invalid x-api-key", + response=Response( + status_code=401, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "authentication_error"}}, + ), + "authentication_error", + ), + ( + InternalServerError( + message=None, + response=Response( + status_code=500, + request=Request(method="POST", url=URL()), + ), + body=None, + ), + "unknown", + ), + ( + APIResponseValidationError( + response=Response( + status_code=200, + request=Request(method="POST", url=URL()), + ), + body=None, + ), + "unknown", + ), + ], +) +async def test_form_invalid_auth(hass: HomeAssistant, side_effect, error) -> None: + """Test we handle invalid auth.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + + with patch( + "homeassistant.components.anthropic.config_flow.anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=side_effect, + ): + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "api_key": "bla", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": error} + + +@pytest.mark.parametrize( + ("current_options", "new_options", "expected_options"), + [ + ( + { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: "none", + CONF_PROMPT: "bla", + }, + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 0.3, + }, + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 0.3, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + }, + ), + ( + { + CONF_RECOMMENDED: False, + CONF_PROMPT: "Speak like a pirate", + CONF_TEMPERATURE: 0.3, + CONF_CHAT_MODEL: RECOMMENDED_CHAT_MODEL, + CONF_MAX_TOKENS: RECOMMENDED_MAX_TOKENS, + }, + { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: "assist", + CONF_PROMPT: "", + }, + { + CONF_RECOMMENDED: True, + CONF_LLM_HASS_API: "assist", + CONF_PROMPT: "", + }, + ), + ], +) +async def test_options_switching( + hass: HomeAssistant, + mock_config_entry, + mock_init_component, + current_options, + new_options, + expected_options, +) -> None: + """Test the options form.""" + hass.config_entries.async_update_entry(mock_config_entry, options=current_options) + options_flow = await hass.config_entries.options.async_init( + mock_config_entry.entry_id + ) + if current_options.get(CONF_RECOMMENDED) != new_options.get(CONF_RECOMMENDED): + options_flow = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + { + **current_options, + CONF_RECOMMENDED: new_options[CONF_RECOMMENDED], + }, + ) + options = await hass.config_entries.options.async_configure( + options_flow["flow_id"], + new_options, + ) + await hass.async_block_till_done() + assert options["type"] is FlowResultType.CREATE_ENTRY + assert options["data"] == expected_options diff --git a/tests/components/anthropic/test_conversation.py b/tests/components/anthropic/test_conversation.py new file mode 100644 index 00000000000..65ede877281 --- /dev/null +++ b/tests/components/anthropic/test_conversation.py @@ -0,0 +1,487 @@ +"""Tests for the Anthropic integration.""" + +from unittest.mock import AsyncMock, Mock, patch + 
+from anthropic import RateLimitError +from anthropic.types import Message, TextBlock, ToolUseBlock, Usage +from freezegun import freeze_time +from httpx import URL, Request, Response +from syrupy.assertion import SnapshotAssertion +import voluptuous as vol + +from homeassistant.components import conversation +from homeassistant.components.conversation import trace +from homeassistant.const import CONF_LLM_HASS_API +from homeassistant.core import Context, HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import intent, llm +from homeassistant.setup import async_setup_component +from homeassistant.util import ulid + +from tests.common import MockConfigEntry + + +async def test_entity( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test entity properties.""" + state = hass.states.get("conversation.claude") + assert state + assert state.attributes["supported_features"] == 0 + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "assist", + }, + ) + with patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ): + await hass.config_entries.async_reload(mock_config_entry.entry_id) + + state = hass.states.get("conversation.claude") + assert state + assert ( + state.attributes["supported_features"] + == conversation.ConversationEntityFeature.CONTROL + ) + + +async def test_error_handling( + hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_init_component +) -> None: + """Test that the default prompt works.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=RateLimitError( + message=None, + response=Response( + status_code=429, request=Request(method="POST", url=URL()) + ), + body=None, + ), + ): + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert result.response.error_code == "unknown", result + + +async def test_template_error( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that template error handling works.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + "prompt": "talk like a {% if True %}smarthome{% else %}pirate please.", + }, + ) + with patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + assert result.response.response_type == intent.IntentResponseType.ERROR, result + assert result.response.error_code == "unknown", result + + +async def test_template_variables( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test that template variables work.""" + context = Context(user_id="12345") + mock_user = Mock() + mock_user.id = "12345" + mock_user.name = "Test User" + + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + "prompt": ( + "The user name is {{ user_name }}. " + "The user id is {{ llm_context.context.user_id }}." 
+ ), + }, + ) + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock + ) as mock_create, + patch("homeassistant.auth.AuthManager.async_get_user", return_value=mock_user), + ): + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + result = await conversation.async_converse( + hass, "hello", None, context, agent_id="conversation.claude" + ) + + assert ( + result.response.response_type == intent.IntentResponseType.ACTION_DONE + ), result + assert "The user name is Test User." in mock_create.mock_calls[1][2]["system"] + assert "The user id is 12345." in mock_create.mock_calls[1][2]["system"] + + +async def test_conversation_agent( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test Anthropic Agent.""" + agent = conversation.agent_manager.async_get_agent(hass, "conversation.claude") + assert agent.supported_languages == "*" + + +@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") +async def test_function_call( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test function call from the assistant.""" + agent_id = "conversation.claude" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.return_value = "Test response" + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + for content in message["content"]: + if not isinstance(content, str) and content["type"] == "tool_use": + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock( + type="text", + text="I have successfully called the function", + ) + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock(type="text", text="Certainly, calling it now!"), + ToolUseBlock( + type="tool_use", + id="toolu_0123456789AbCdEfGhIjKlM", + name="test_tool", + input={"param1": "test_value"}, + ), + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="tool_use", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create, + freeze_time("2024-06-03 23:00:00"), + ): + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert "Today's date is 2024-06-03." 
in mock_create.mock_calls[1][2]["system"] + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert mock_create.mock_calls[1][2]["messages"][2] == { + "role": "user", + "content": [ + { + "content": '"Test response"', + "tool_use_id": "toolu_0123456789AbCdEfGhIjKlM", + "type": "tool_result", + } + ], + } + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args={"param1": "test_value"}, + ), + llm.LLMContext( + platform="anthropic", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + # Test Conversation tracing + traces = trace.async_get_traces() + assert traces + last_trace = traces[-1].as_dict() + trace_events = last_trace.get("events", []) + assert [event["event_type"] for event in trace_events] == [ + trace.ConversationTraceEventType.ASYNC_PROCESS, + trace.ConversationTraceEventType.AGENT_DETAIL, + trace.ConversationTraceEventType.TOOL_CALL, + ] + # AGENT_DETAIL event contains the raw prompt passed to the model + detail_event = trace_events[1] + assert "Answer in plain text" in detail_event["data"]["system"] + assert "Today's date is 2024-06-03." in trace_events[1]["data"]["system"] + + # Call it again, make sure we have updated prompt + with ( + patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create, + freeze_time("2024-06-04 23:00:00"), + ): + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert "Today's date is 2024-06-04." in mock_create.mock_calls[1][2]["system"] + # Test old assert message not updated + assert "Today's date is 2024-06-03." 
in trace_events[1]["data"]["system"] + + +@patch("homeassistant.components.anthropic.conversation.llm.AssistAPI._async_get_tools") +async def test_function_exception( + mock_get_tools, + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test function call with exception.""" + agent_id = "conversation.claude" + context = Context() + + mock_tool = AsyncMock() + mock_tool.name = "test_tool" + mock_tool.description = "Test function" + mock_tool.parameters = vol.Schema( + {vol.Optional("param1", description="Test parameters"): str} + ) + mock_tool.async_call.side_effect = HomeAssistantError("Test tool exception") + + mock_get_tools.return_value = [mock_tool] + + def completion_result(*args, messages, **kwargs): + for message in messages: + for content in message["content"]: + if not isinstance(content, str) and content["type"] == "tool_use": + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock( + type="text", + text="There was an error calling the function", + ) + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + return Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[ + TextBlock(type="text", text="Certainly, calling it now!"), + ToolUseBlock( + type="tool_use", + id="toolu_0123456789AbCdEfGhIjKlM", + name="test_tool", + input={"param1": "test_value"}, + ), + ], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="tool_use", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ) + + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=completion_result, + ) as mock_create: + result = await conversation.async_converse( + hass, + "Please call the test function", + None, + context, + agent_id=agent_id, + ) + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert mock_create.mock_calls[1][2]["messages"][2] == { + "role": "user", + "content": [ + { + "content": '{"error": "HomeAssistantError", "error_text": "Test tool exception"}', + "tool_use_id": "toolu_0123456789AbCdEfGhIjKlM", + "type": "tool_result", + } + ], + } + mock_tool.async_call.assert_awaited_once_with( + hass, + llm.ToolInput( + tool_name="test_tool", + tool_args={"param1": "test_value"}, + ), + llm.LLMContext( + platform="anthropic", + context=context, + user_prompt="Please call the test function", + language="en", + assistant="conversation", + device_id=None, + ), + ) + + +async def test_assist_api_tools_conversion( + hass: HomeAssistant, + mock_config_entry_with_assist: MockConfigEntry, + mock_init_component, +) -> None: + """Test that we are able to convert actual tools from Assist API.""" + for component in ( + "intent", + "todo", + "light", + "shopping_list", + "humidifier", + "climate", + "media_player", + "vacuum", + "cover", + "weather", + ): + assert await async_setup_component(hass, component, {}) + + agent_id = "conversation.claude" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + return_value=Message( + type="message", + id="msg_1234567890ABCDEFGHIJKLMN", + content=[TextBlock(type="text", text="Hello, how can I help you?")], + model="claude-3-5-sonnet-20240620", + role="assistant", + stop_reason="end_turn", + stop_sequence=None, + usage=Usage(input_tokens=8, output_tokens=12), + ), + ) as mock_create: + 
await conversation.async_converse( + hass, "hello", None, Context(), agent_id=agent_id + ) + + tools = mock_create.mock_calls[0][2]["tools"] + assert tools + + +async def test_unknown_hass_api( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + mock_init_component, +) -> None: + """Test when we reference an API that no longer exists.""" + hass.config_entries.async_update_entry( + mock_config_entry, + options={ + **mock_config_entry.options, + CONF_LLM_HASS_API: "non-existing", + }, + ) + + result = await conversation.async_converse( + hass, "hello", None, Context(), agent_id="conversation.claude" + ) + + assert result == snapshot + + +@patch("anthropic.resources.messages.AsyncMessages.create", new_callable=AsyncMock) +async def test_conversation_id( + mock_create, + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, +) -> None: + """Test conversation ID is honored.""" + result = await conversation.async_converse( + hass, "hello", None, None, agent_id="conversation.claude" + ) + + conversation_id = result.conversation_id + + result = await conversation.async_converse( + hass, "hello", conversation_id, None, agent_id="conversation.claude" + ) + + assert result.conversation_id == conversation_id + + unknown_id = ulid.ulid() + + result = await conversation.async_converse( + hass, "hello", unknown_id, None, agent_id="conversation.claude" + ) + + assert result.conversation_id != unknown_id + + result = await conversation.async_converse( + hass, "hello", "koala", None, agent_id="conversation.claude" + ) + + assert result.conversation_id == "koala" diff --git a/tests/components/anthropic/test_init.py b/tests/components/anthropic/test_init.py new file mode 100644 index 00000000000..ee87bb708d0 --- /dev/null +++ b/tests/components/anthropic/test_init.py @@ -0,0 +1,64 @@ +"""Tests for the Anthropic integration.""" + +from unittest.mock import AsyncMock, patch + +from anthropic import ( + APIConnectionError, + APITimeoutError, + AuthenticationError, + BadRequestError, +) +from httpx import URL, Request, Response +import pytest + +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize( + ("side_effect", "error"), + [ + (APIConnectionError(request=None), "Connection error"), + (APITimeoutError(request=None), "Request timed out"), + ( + BadRequestError( + message="Your credit balance is too low to access the Claude API. 
Please go to Plans & Billing to upgrade or purchase credits.", + response=Response( + status_code=400, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "invalid_request_error"}}, + ), + "anthropic integration not ready yet: Your credit balance is too low to access the Claude API", + ), + ( + AuthenticationError( + message="invalid x-api-key", + response=Response( + status_code=401, + request=Request(method="POST", url=URL()), + ), + body={"type": "error", "error": {"type": "authentication_error"}}, + ), + "Invalid API key", + ), + ], +) +async def test_init_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, + side_effect, + error, +) -> None: + """Test initialization errors.""" + with patch( + "anthropic.resources.messages.AsyncMessages.create", + new_callable=AsyncMock, + side_effect=side_effect, + ): + assert await async_setup_component(hass, "anthropic", {}) + await hass.async_block_till_done() + assert error in caplog.text diff --git a/tests/components/apache_kafka/test_init.py b/tests/components/apache_kafka/test_init.py index 2b702046054..cffe08ffd4a 100644 --- a/tests/components/apache_kafka/test_init.py +++ b/tests/components/apache_kafka/test_init.py @@ -3,8 +3,9 @@ from __future__ import annotations from asyncio import AbstractEventLoop -from collections.abc import Callable +from collections.abc import Callable, Generator from dataclasses import dataclass +from typing import Any from unittest.mock import patch import pytest @@ -41,7 +42,7 @@ class MockKafkaClient: @pytest.fixture(name="mock_client") -def mock_client_fixture(): +def mock_client_fixture() -> Generator[MockKafkaClient]: """Mock the apache kafka client.""" with ( patch(f"{PRODUCER_PATH}.start") as start, @@ -89,7 +90,7 @@ async def test_full_config(hass: HomeAssistant, mock_client: MockKafkaClient) -> mock_client.start.assert_called_once() -async def _setup(hass, filter_config): +async def _setup(hass: HomeAssistant, filter_config: dict[str, Any]) -> None: """Shared set up for filtering tests.""" config = {apache_kafka.DOMAIN: {"filter": filter_config}} config[apache_kafka.DOMAIN].update(MIN_CONFIG) @@ -98,7 +99,9 @@ async def _setup(hass, filter_config): await hass.async_block_till_done() -async def _run_filter_tests(hass, tests, mock_client): +async def _run_filter_tests( + hass: HomeAssistant, tests: list[FilterTest], mock_client: MockKafkaClient +) -> None: """Run a series of filter tests on apache kafka.""" for test in tests: hass.states.async_set(test.id, STATE_ON) diff --git a/tests/components/apple_tv/common.py b/tests/components/apple_tv/common.py index ddb8c1348d9..8a81536c792 100644 --- a/tests/components/apple_tv/common.py +++ b/tests/components/apple_tv/common.py @@ -1,5 +1,7 @@ """Test code shared between test files.""" +from typing import Any + from pyatv import conf, const, interface from pyatv.const import Protocol @@ -7,7 +9,7 @@ from pyatv.const import Protocol class MockPairingHandler(interface.PairingHandler): """Mock for PairingHandler in pyatv.""" - def __init__(self, *args): + def __init__(self, *args: Any) -> None: """Initialize a new MockPairingHandler.""" super().__init__(*args) self.pin_code = None diff --git a/tests/components/application_credentials/test_init.py b/tests/components/application_credentials/test_init.py index e6fdf568bcc..d90084fa7c9 100644 --- a/tests/components/application_credentials/test_init.py +++ b/tests/components/application_credentials/test_init.py @@ -124,7 +124,12 @@ 
def config_flow_handler( class OAuthFixture: """Fixture to facilitate testing an OAuth flow.""" - def __init__(self, hass, hass_client, aioclient_mock): + def __init__( + self, + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + ) -> None: """Initialize OAuthFixture.""" self.hass = hass self.hass_client = hass_client @@ -184,7 +189,7 @@ async def oauth_fixture( class Client: """Test client with helper methods for application credentials websocket.""" - def __init__(self, client): + def __init__(self, client) -> None: """Initialize Client.""" self.client = client self.id = 0 diff --git a/tests/components/apsystems/conftest.py b/tests/components/apsystems/conftest.py index c191c7ca2dc..0feccf21578 100644 --- a/tests/components/apsystems/conftest.py +++ b/tests/components/apsystems/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from APsystemsEZ1 import ReturnDeviceInfo, ReturnOutputData, Status +from APsystemsEZ1 import ReturnAlarmInfo, ReturnDeviceInfo, ReturnOutputData import pytest from homeassistant.components.apsystems.const import DOMAIN @@ -52,7 +52,13 @@ def mock_apsystems() -> Generator[MagicMock]: e2=6.0, te2=7.0, ) - mock_api.get_device_power_status.return_value = Status.normal + mock_api.get_alarm_info.return_value = ReturnAlarmInfo( + offgrid=False, + shortcircuit_1=True, + shortcircuit_2=False, + operating=False, + ) + mock_api.get_device_power_status.return_value = True yield mock_api diff --git a/tests/components/apsystems/snapshots/test_binary_sensor.ambr b/tests/components/apsystems/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..0875c88976b --- /dev/null +++ b/tests/components/apsystems/snapshots/test_binary_sensor.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.mock_title_dc_1_short_circuit_error_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_dc_1_short_circuit_error_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC 1 short circuit error status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_1_short_circuit_error_status', + 'unique_id': 'MY_SERIAL_NUMBER_dc_1_short_circuit_error_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_dc_1_short_circuit_error_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title DC 1 short circuit error status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_dc_1_short_circuit_error_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_dc_2_short_circuit_error_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 
'binary_sensor.mock_title_dc_2_short_circuit_error_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DC 2 short circuit error status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'dc_2_short_circuit_error_status', + 'unique_id': 'MY_SERIAL_NUMBER_dc_2_short_circuit_error_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_dc_2_short_circuit_error_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title DC 2 short circuit error status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_dc_2_short_circuit_error_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_off_grid_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_off_grid_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Off grid status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'off_grid_status', + 'unique_id': 'MY_SERIAL_NUMBER_off_grid_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_off_grid_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Off grid status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_off_grid_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_output_fault_status-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mock_title_output_fault_status', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Output fault status', + 'platform': 'apsystems', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'output_fault_status', + 'unique_id': 'MY_SERIAL_NUMBER_output_fault_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mock_title_output_fault_status-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Mock Title Output fault status', + }), + 'context': , + 'entity_id': 'binary_sensor.mock_title_output_fault_status', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/apsystems/test_binary_sensor.py b/tests/components/apsystems/test_binary_sensor.py new file mode 100644 index 
00000000000..0c6fbffc93c --- /dev/null +++ b/tests/components/apsystems/test_binary_sensor.py @@ -0,0 +1,31 @@ +"""Test the APSystem binary sensor module.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_apsystems: AsyncMock, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.apsystems.PLATFORMS", + [Platform.BINARY_SENSOR], + ): + await setup_integration(hass, mock_config_entry) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry.entry_id + ) diff --git a/tests/components/aquacell/__init__.py b/tests/components/aquacell/__init__.py index c54bc539496..9190172145a 100644 --- a/tests/components/aquacell/__init__.py +++ b/tests/components/aquacell/__init__.py @@ -1,6 +1,9 @@ """Tests for the Aquacell integration.""" +from aioaquacell import Brand + from homeassistant.components.aquacell.const import ( + CONF_BRAND, CONF_REFRESH_TOKEN, CONF_REFRESH_TOKEN_CREATION_TIME, ) @@ -14,11 +17,20 @@ TEST_CONFIG_ENTRY = { CONF_PASSWORD: "test-password", CONF_REFRESH_TOKEN: "refresh-token", CONF_REFRESH_TOKEN_CREATION_TIME: 0, + CONF_BRAND: Brand.AQUACELL, +} + +TEST_CONFIG_ENTRY_WITHOUT_BRAND = { + CONF_EMAIL: "test@test.com", + CONF_PASSWORD: "test-password", + CONF_REFRESH_TOKEN: "refresh-token", + CONF_REFRESH_TOKEN_CREATION_TIME: 0, } TEST_USER_INPUT = { CONF_EMAIL: "test@test.com", CONF_PASSWORD: "test-password", + CONF_BRAND: "aquacell", } DSN = "DSN" diff --git a/tests/components/aquacell/conftest.py b/tests/components/aquacell/conftest.py index f5a741ceed8..443f7da77ce 100644 --- a/tests/components/aquacell/conftest.py +++ b/tests/components/aquacell/conftest.py @@ -13,7 +13,7 @@ from homeassistant.components.aquacell.const import ( ) from homeassistant.const import CONF_EMAIL -from . import TEST_CONFIG_ENTRY +from . 
import TEST_CONFIG_ENTRY, TEST_CONFIG_ENTRY_WITHOUT_BRAND from tests.common import MockConfigEntry, load_json_array_fixture @@ -76,3 +76,17 @@ def mock_config_entry() -> MockConfigEntry: CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), }, ) + + +@pytest.fixture +def mock_config_entry_without_brand() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + title="Aquacell", + unique_id=TEST_CONFIG_ENTRY[CONF_EMAIL], + data={ + **TEST_CONFIG_ENTRY_WITHOUT_BRAND, + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), + }, + ) diff --git a/tests/components/aquacell/test_config_flow.py b/tests/components/aquacell/test_config_flow.py index b6bcb82293c..b73852d513f 100644 --- a/tests/components/aquacell/test_config_flow.py +++ b/tests/components/aquacell/test_config_flow.py @@ -5,7 +5,11 @@ from unittest.mock import AsyncMock from aioaquacell import ApiException, AuthenticationFailed import pytest -from homeassistant.components.aquacell.const import CONF_REFRESH_TOKEN, DOMAIN +from homeassistant.components.aquacell.const import ( + CONF_BRAND, + CONF_REFRESH_TOKEN, + DOMAIN, +) from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant @@ -51,7 +55,9 @@ async def test_full_flow( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": SOURCE_USER} ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" assert result["errors"] == {} result2 = await hass.config_entries.flow.async_configure( @@ -65,6 +71,7 @@ async def test_full_flow( assert result2["data"][CONF_EMAIL] == TEST_CONFIG_ENTRY[CONF_EMAIL] assert result2["data"][CONF_PASSWORD] == TEST_CONFIG_ENTRY[CONF_PASSWORD] assert result2["data"][CONF_REFRESH_TOKEN] == TEST_CONFIG_ENTRY[CONF_REFRESH_TOKEN] + assert result2["data"][CONF_BRAND] == TEST_CONFIG_ENTRY[CONF_BRAND] assert len(mock_setup_entry.mock_calls) == 1 @@ -109,4 +116,5 @@ async def test_form_exceptions( assert result3["data"][CONF_EMAIL] == TEST_CONFIG_ENTRY[CONF_EMAIL] assert result3["data"][CONF_PASSWORD] == TEST_CONFIG_ENTRY[CONF_PASSWORD] assert result3["data"][CONF_REFRESH_TOKEN] == TEST_CONFIG_ENTRY[CONF_REFRESH_TOKEN] + assert result3["data"][CONF_BRAND] == TEST_CONFIG_ENTRY[CONF_BRAND] assert len(mock_setup_entry.mock_calls) == 1 diff --git a/tests/components/aquacell/test_init.py b/tests/components/aquacell/test_init.py index a70d077e180..580d87f4d9a 100644 --- a/tests/components/aquacell/test_init.py +++ b/tests/components/aquacell/test_init.py @@ -38,6 +38,17 @@ async def test_load_unload_entry( assert entry.state is ConfigEntryState.NOT_LOADED +async def test_load_withoutbrand( + hass: HomeAssistant, + mock_aquacell_api: AsyncMock, + mock_config_entry_without_brand: MockConfigEntry, +) -> None: + """Test load entry without brand.""" + await setup_integration(hass, mock_config_entry_without_brand) + + assert mock_config_entry_without_brand.state is ConfigEntryState.LOADED + + async def test_coordinator_update_valid_refresh_token( hass: HomeAssistant, mock_aquacell_api: AsyncMock, diff --git a/tests/components/aranet/__init__.py b/tests/components/aranet/__init__.py index 18bebfb44a4..711c605fd28 100644 --- a/tests/components/aranet/__init__.py +++ b/tests/components/aranet/__init__.py @@ -82,3 +82,11 @@ VALID_ARANET_RADIATION_DATA_SERVICE_INFO = fake_service_info( 1794: b"\x02!&\x04\x01\x00`-\x00\x00\x08\x98\x05\x00n\x00\x00d\x00,\x01\xfd\x00\xc7" }, ) + 
+VALID_ARANET_RADON_DATA_SERVICE_INFO = fake_service_info( + "AranetRn+ 12345", + "0000fce0-0000-1000-8000-00805f9b34fb", + { + 1794: b"\x03!\x04\x06\x01\x00\x00\x00\x07\x00\xfe\x01\xc9'\xce\x01\x00d\x01X\x02\xf6\x01\x08" + }, +) diff --git a/tests/components/aranet/test_sensor.py b/tests/components/aranet/test_sensor.py index c932a92c1e8..7bd00af4837 100644 --- a/tests/components/aranet/test_sensor.py +++ b/tests/components/aranet/test_sensor.py @@ -11,6 +11,7 @@ from . import ( DISABLED_INTEGRATIONS_SERVICE_INFO, VALID_ARANET2_DATA_SERVICE_INFO, VALID_ARANET_RADIATION_DATA_SERVICE_INFO, + VALID_ARANET_RADON_DATA_SERVICE_INFO, VALID_DATA_SERVICE_INFO, ) @@ -188,6 +189,71 @@ async def test_sensors_aranet4(hass: HomeAssistant) -> None: await hass.async_block_till_done() +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors_aranetrn(hass: HomeAssistant) -> None: + """Test setting up creates the sensors for Aranet Radon device.""" + entry = MockConfigEntry( + domain=DOMAIN, + unique_id="aa:bb:cc:dd:ee:ff", + ) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + assert len(hass.states.async_all("sensor")) == 0 + inject_bluetooth_service_info(hass, VALID_ARANET_RADON_DATA_SERVICE_INFO) + await hass.async_block_till_done() + assert len(hass.states.async_all("sensor")) == 6 + + batt_sensor = hass.states.get("sensor.aranetrn_12345_battery") + batt_sensor_attrs = batt_sensor.attributes + assert batt_sensor.state == "100" + assert batt_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Battery" + assert batt_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert batt_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + radon_sensor = hass.states.get("sensor.aranetrn_12345_radon_concentration") + radon_sensor_attrs = radon_sensor.attributes + assert radon_sensor.state == "7" + assert radon_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Radon Concentration" + assert radon_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "Bq/m³" + assert radon_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + humid_sensor = hass.states.get("sensor.aranetrn_12345_humidity") + humid_sensor_attrs = humid_sensor.attributes + assert humid_sensor.state == "46.2" + assert humid_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Humidity" + assert humid_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "%" + assert humid_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + temp_sensor = hass.states.get("sensor.aranetrn_12345_temperature") + temp_sensor_attrs = temp_sensor.attributes + assert temp_sensor.state == "25.5" + assert temp_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Temperature" + assert temp_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "°C" + assert temp_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + press_sensor = hass.states.get("sensor.aranetrn_12345_pressure") + press_sensor_attrs = press_sensor.attributes + assert press_sensor.state == "1018.5" + assert press_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Pressure" + assert press_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "hPa" + assert press_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + interval_sensor = hass.states.get("sensor.aranetrn_12345_update_interval") + interval_sensor_attrs = interval_sensor.attributes + assert interval_sensor.state == "600" + assert ( + interval_sensor_attrs[ATTR_FRIENDLY_NAME] == "AranetRn+ 12345 Update Interval" + ) + assert interval_sensor_attrs[ATTR_UNIT_OF_MEASUREMENT] == "s" + assert
interval_sensor_attrs[ATTR_STATE_CLASS] == "measurement" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + + @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_smart_home_integration_disabled(hass: HomeAssistant) -> None: """Test disabling smart home integration marks entities as unavailable.""" diff --git a/tests/components/aseko_pool_live/test_config_flow.py b/tests/components/aseko_pool_live/test_config_flow.py index 4307e527cee..e4dedf36da4 100644 --- a/tests/components/aseko_pool_live/test_config_flow.py +++ b/tests/components/aseko_pool_live/test_config_flow.py @@ -133,13 +133,7 @@ async def test_async_step_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -179,13 +173,7 @@ async def test_async_step_reauth_exception( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - ) + result = await mock_entry.start_reauth_flow(hass) with patch( "homeassistant.components.aseko_pool_live.config_flow.WebAccount.login", diff --git a/tests/components/assist_pipeline/conftest.py b/tests/components/assist_pipeline/conftest.py index b2eca1e7ce1..0f6872edbfe 100644 --- a/tests/components/assist_pipeline/conftest.py +++ b/tests/components/assist_pipeline/conftest.py @@ -23,7 +23,7 @@ from homeassistant.components.assist_pipeline.pipeline import ( ) from homeassistant.config_entries import ConfigEntry, ConfigFlow from homeassistant.const import Platform -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component @@ -36,6 +36,8 @@ from tests.common import ( mock_integration, mock_platform, ) +from tests.components.stt.common import MockSTTProvider, MockSTTProviderEntity +from tests.components.tts.common import MockTTSProvider _TRANSCRIPT = "test transcript" @@ -43,110 +45,8 @@ BYTES_ONE_SECOND = SAMPLE_RATE * SAMPLE_WIDTH * SAMPLE_CHANNELS @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir - - -class BaseProvider: - """Mock STT provider.""" - - _supported_languages = ["en-US"] - - def __init__(self, text: str) -> None: - """Init test provider.""" - self.text = text - self.received: list[bytes] = [] - - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return self._supported_languages - - @property - def supported_formats(self) -> list[stt.AudioFormats]: - """Return a list of supported formats.""" - return [stt.AudioFormats.WAV] - - @property - def supported_codecs(self) -> list[stt.AudioCodecs]: - """Return a list of supported codecs.""" - return [stt.AudioCodecs.PCM] - - @property - def supported_bit_rates(self) -> list[stt.AudioBitRates]: - """Return a list of supported bitrates.""" - return 
[stt.AudioBitRates.BITRATE_16] - - @property - def supported_sample_rates(self) -> list[stt.AudioSampleRates]: - """Return a list of supported samplerates.""" - return [stt.AudioSampleRates.SAMPLERATE_16000] - - @property - def supported_channels(self) -> list[stt.AudioChannels]: - """Return a list of supported channels.""" - return [stt.AudioChannels.CHANNEL_MONO] - - async def async_process_audio_stream( - self, metadata: stt.SpeechMetadata, stream: AsyncIterable[bytes] - ) -> stt.SpeechResult: - """Process an audio stream.""" - async for data in stream: - if not data: - break - self.received.append(data) - return stt.SpeechResult(self.text, stt.SpeechResultState.SUCCESS) - - -class MockSttProvider(BaseProvider, stt.Provider): - """Mock provider.""" - - -class MockSttProviderEntity(BaseProvider, stt.SpeechToTextEntity): - """Mock provider entity.""" - - _attr_name = "Mock STT" - - -class MockTTSProvider(tts.Provider): - """Mock TTS provider.""" - - name = "Test" - _supported_languages = ["en-US"] - _supported_voices = { - "en-US": [ - tts.Voice("james_earl_jones", "James Earl Jones"), - tts.Voice("fran_drescher", "Fran Drescher"), - ] - } - _supported_options = ["voice", "age", tts.ATTR_AUDIO_OUTPUT] - - @property - def default_language(self) -> str: - """Return the default language.""" - return "en" - - @property - def supported_languages(self) -> list[str]: - """Return list of supported languages.""" - return self._supported_languages - - @callback - def async_get_supported_voices(self, language: str) -> list[tts.Voice] | None: - """Return a list of supported voices for a language.""" - return self._supported_voices.get(language) - - @property - def supported_options(self) -> list[str]: - """Return list of supported options like voice, emotions.""" - return self._supported_options - - def get_tts_audio( - self, message: str, language: str, options: dict[str, Any] - ) -> tts.TtsAudioType: - """Load TTS data.""" - return ("mp3", b"") class MockTTSPlatform(MockPlatform): @@ -154,7 +54,7 @@ class MockTTSPlatform(MockPlatform): PLATFORM_SCHEMA = tts.PLATFORM_SCHEMA - def __init__(self, *, async_get_engine, **kwargs): + def __init__(self, *, async_get_engine, **kwargs: Any) -> None: """Initialize the tts platform.""" super().__init__(**kwargs) self.async_get_engine = async_get_engine @@ -163,25 +63,29 @@ class MockTTSPlatform(MockPlatform): @pytest.fixture async def mock_tts_provider() -> MockTTSProvider: """Mock TTS provider.""" - return MockTTSProvider() + provider = MockTTSProvider("en") + provider._supported_languages = ["en-US"] + return provider @pytest.fixture -async def mock_stt_provider() -> MockSttProvider: +async def mock_stt_provider() -> MockSTTProvider: """Mock STT provider.""" - return MockSttProvider(_TRANSCRIPT) + return MockSTTProvider(supported_languages=["en-US"], text=_TRANSCRIPT) @pytest.fixture -def mock_stt_provider_entity() -> MockSttProviderEntity: +def mock_stt_provider_entity() -> MockSTTProviderEntity: """Test provider entity fixture.""" - return MockSttProviderEntity(_TRANSCRIPT) + entity = MockSTTProviderEntity(supported_languages=["en-US"], text=_TRANSCRIPT) + entity._attr_name = "Mock STT" + return entity class MockSttPlatform(MockPlatform): """Provide a fake STT platform.""" - def __init__(self, *, async_get_engine, **kwargs): + def __init__(self, *, async_get_engine, **kwargs: Any) -> None: """Initialize the stt platform.""" super().__init__(**kwargs) self.async_get_engine = async_get_engine @@ -291,8 +195,8 @@ def config_flow_fixture(hass: HomeAssistant) 
-> Generator[None]: @pytest.fixture async def init_supporting_components( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider: MockSTTProvider, + mock_stt_provider_entity: MockSTTProviderEntity, mock_tts_provider: MockTTSProvider, mock_wake_word_provider_entity: MockWakeWordEntity, mock_wake_word_provider_entity2: MockWakeWordEntity2, diff --git a/tests/components/assist_pipeline/snapshots/test_init.ambr b/tests/components/assist_pipeline/snapshots/test_init.ambr index 8124ed4ab85..7f29534e473 100644 --- a/tests/components/assist_pipeline/snapshots/test_init.ambr +++ b/tests/components/assist_pipeline/snapshots/test_init.ambr @@ -10,7 +10,7 @@ }), dict({ 'data': dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': , 'channel': , @@ -301,7 +301,7 @@ }), dict({ 'data': dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': , 'channel': , diff --git a/tests/components/assist_pipeline/snapshots/test_websocket.ambr b/tests/components/assist_pipeline/snapshots/test_websocket.ambr index e5ae18d28f2..7ea6af7e0bd 100644 --- a/tests/components/assist_pipeline/snapshots/test_websocket.ambr +++ b/tests/components/assist_pipeline/snapshots/test_websocket.ambr @@ -11,7 +11,7 @@ # --- # name: test_audio_pipeline.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -92,7 +92,7 @@ # --- # name: test_audio_pipeline_debug.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -185,7 +185,7 @@ # --- # name: test_audio_pipeline_with_enhancements.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -288,7 +288,7 @@ # --- # name: test_audio_pipeline_with_wake_word_no_timeout.3 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -401,7 +401,7 @@ # --- # name: test_device_capture.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -427,7 +427,7 @@ # --- # name: test_device_capture_override.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -475,7 +475,7 @@ # --- # name: test_device_capture_queue_full.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -649,7 +649,7 @@ # --- # name: test_stt_stream_failed.1 dict({ - 'engine': 'test', + 'engine': 'stt.mock_stt', 'metadata': dict({ 'bit_rate': 16, 'channel': 1, @@ -663,9 +663,6 @@ # name: test_stt_stream_failed.2 None # --- -# name: test_text_only_pipeline.3 - None -# --- # name: test_text_only_pipeline[extra_msg0] dict({ 'language': 'en', diff --git a/tests/components/assist_pipeline/test_init.py b/tests/components/assist_pipeline/test_init.py index 4206a288331..31cc1268098 100644 --- a/tests/components/assist_pipeline/test_init.py +++ b/tests/components/assist_pipeline/test_init.py @@ -22,8 +22,8 @@ from homeassistant.setup import async_setup_component from .conftest import ( BYTES_ONE_SECOND, - MockSttProvider, - MockSttProviderEntity, + MockSTTProvider, + MockSTTProviderEntity, MockTTSProvider, MockWakeWordEntity, make_10ms_chunk, @@ -47,7 +47,7 @@ def process_events(events: list[assist_pipeline.PipelineEvent]) -> list[dict]: async def test_pipeline_from_audio_stream_auto( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + 
mock_stt_provider_entity: MockSTTProviderEntity, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -80,15 +80,15 @@ async def test_pipeline_from_audio_stream_auto( ) assert process_events(events) == snapshot - assert len(mock_stt_provider.received) == 2 - assert mock_stt_provider.received[0].startswith(b"part1") - assert mock_stt_provider.received[1].startswith(b"part2") + assert len(mock_stt_provider_entity.received) == 2 + assert mock_stt_provider_entity.received[0].startswith(b"part1") + assert mock_stt_provider_entity.received[1].startswith(b"part2") async def test_pipeline_from_audio_stream_legacy( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -153,7 +153,7 @@ async def test_pipeline_from_audio_stream_legacy( async def test_pipeline_from_audio_stream_entity( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider_entity: MockSttProviderEntity, + mock_stt_provider_entity: MockSTTProviderEntity, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -218,7 +218,7 @@ async def test_pipeline_from_audio_stream_entity( async def test_pipeline_from_audio_stream_no_stt( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -281,7 +281,7 @@ async def test_pipeline_from_audio_stream_no_stt( async def test_pipeline_from_audio_stream_unknown_pipeline( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, init_components, snapshot: SnapshotAssertion, ) -> None: @@ -319,7 +319,7 @@ async def test_pipeline_from_audio_stream_unknown_pipeline( async def test_pipeline_from_audio_stream_wake_word( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider_entity: MockSTTProviderEntity, mock_wake_word_provider_entity: MockWakeWordEntity, init_components, snapshot: SnapshotAssertion, @@ -381,21 +381,21 @@ async def test_pipeline_from_audio_stream_wake_word( # 2. queued audio (from mock wake word entity) # 3. part1 # 4. 
part2 - assert len(mock_stt_provider.received) > 3 + assert len(mock_stt_provider_entity.received) > 3 first_chunk = bytes( - [c_byte for c in mock_stt_provider.received[:-3] for c_byte in c] + [c_byte for c in mock_stt_provider_entity.received[:-3] for c_byte in c] ) assert first_chunk == wake_chunk_1[len(wake_chunk_1) // 2 :] + wake_chunk_2 - assert mock_stt_provider.received[-3] == b"queued audio" - assert mock_stt_provider.received[-2].startswith(b"part1") - assert mock_stt_provider.received[-1].startswith(b"part2") + assert mock_stt_provider_entity.received[-3] == b"queued audio" + assert mock_stt_provider_entity.received[-2].startswith(b"part1") + assert mock_stt_provider_entity.received[-1].startswith(b"part2") async def test_pipeline_save_audio( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -474,7 +474,7 @@ async def test_pipeline_save_audio( async def test_pipeline_saved_audio_with_device_id( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -529,7 +529,7 @@ async def test_pipeline_saved_audio_with_device_id( async def test_pipeline_saved_audio_write_error( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -578,7 +578,7 @@ async def test_pipeline_saved_audio_write_error( async def test_pipeline_saved_audio_empty_queue( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_supporting_components, snapshot: SnapshotAssertion, @@ -641,7 +641,7 @@ async def test_pipeline_saved_audio_empty_queue( async def test_wake_word_detection_aborted( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider: MockSTTProvider, mock_wake_word_provider_entity: MockWakeWordEntity, init_components, pipeline_data: assist_pipeline.pipeline.PipelineData, diff --git a/tests/components/assist_pipeline/test_pipeline.py b/tests/components/assist_pipeline/test_pipeline.py index 45a661c0f07..50d0fc9bed8 100644 --- a/tests/components/assist_pipeline/test_pipeline.py +++ b/tests/components/assist_pipeline/test_pipeline.py @@ -26,7 +26,7 @@ from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . 
import MANY_LANGUAGES -from .conftest import MockSttProvider, MockTTSProvider +from .conftest import MockSTTProviderEntity, MockTTSProvider from tests.common import flush_store @@ -398,7 +398,7 @@ async def test_default_pipeline_no_stt_tts( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline( hass: HomeAssistant, - mock_stt_provider: MockSttProvider, + mock_stt_provider_entity: MockSTTProviderEntity, mock_tts_provider: MockTTSProvider, ha_language: str, ha_country: str | None, @@ -412,7 +412,7 @@ async def test_default_pipeline( hass.config.language = ha_language with ( - patch.object(mock_stt_provider, "_supported_languages", MANY_LANGUAGES), + patch.object(mock_stt_provider_entity, "_supported_languages", MANY_LANGUAGES), patch.object(mock_tts_provider, "_supported_languages", MANY_LANGUAGES), ): assert await async_setup_component(hass, "assist_pipeline", {}) @@ -429,7 +429,7 @@ async def test_default_pipeline( id=pipeline.id, language=pipeline_language, name="Home Assistant", - stt_engine="test", + stt_engine="stt.mock_stt", stt_language=stt_language, tts_engine="test", tts_language=tts_language, @@ -441,10 +441,10 @@ async def test_default_pipeline( @pytest.mark.usefixtures("init_supporting_components") async def test_default_pipeline_unsupported_stt_language( - hass: HomeAssistant, mock_stt_provider: MockSttProvider + hass: HomeAssistant, mock_stt_provider_entity: MockSTTProviderEntity ) -> None: """Test async_get_pipeline.""" - with patch.object(mock_stt_provider, "_supported_languages", ["smurfish"]): + with patch.object(mock_stt_provider_entity, "_supported_languages", ["smurfish"]): assert await async_setup_component(hass, "assist_pipeline", {}) pipeline_data: PipelineData = hass.data[DOMAIN] @@ -489,7 +489,7 @@ async def test_default_pipeline_unsupported_tts_language( id=pipeline.id, language="en", name="Home Assistant", - stt_engine="test", + stt_engine="stt.mock_stt", stt_language="en-US", tts_engine=None, tts_language=None, diff --git a/tests/components/assist_pipeline/test_vad.py b/tests/components/assist_pipeline/test_vad.py index 17cb73a9139..fda26d2fb94 100644 --- a/tests/components/assist_pipeline/test_vad.py +++ b/tests/components/assist_pipeline/test_vad.py @@ -17,15 +17,12 @@ def test_silence() -> None: # True return value indicates voice command has not finished assert segmenter.process(_ONE_SECOND * 3, False) + assert not segmenter.in_command def test_speech() -> None: """Test that silence + speech + silence triggers a voice command.""" - def is_speech(chunk): - """Anything non-zero is speech.""" - return sum(chunk) > 0 - segmenter = VoiceCommandSegmenter() # silence @@ -33,10 +30,12 @@ def test_speech() -> None: # "speech" assert segmenter.process(_ONE_SECOND, True) + assert segmenter.in_command # silence # False return value indicates voice command is finished assert not segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command def test_audio_buffer() -> None: @@ -105,3 +104,125 @@ def test_chunk_samples_leftover() -> None: assert len(chunks) == 1 assert leftover_chunk_buffer.bytes() == bytes([5, 6]) + + +def test_silence_seconds() -> None: + """Test end of voice command silence seconds.""" + + segmenter = VoiceCommandSegmenter(silence_seconds=1.0) + + # silence + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # "speech" + assert segmenter.process(_ONE_SECOND, True) + assert segmenter.in_command + + # not enough silence to end + assert segmenter.process(_ONE_SECOND * 0.5, False) + 
assert segmenter.in_command + + # exactly enough silence now + assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.in_command + + +def test_silence_reset() -> None: + """Test that speech resets end of voice command detection.""" + + segmenter = VoiceCommandSegmenter(silence_seconds=1.0, reset_seconds=0.5) + + # silence + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # "speech" + assert segmenter.process(_ONE_SECOND, True) + assert segmenter.in_command + + # not enough silence to end + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.in_command + + # speech should reset silence detection + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.in_command + + # not enough silence to end + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.in_command + + # exactly enough silence now + assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.in_command + + +def test_speech_reset() -> None: + """Test that silence resets start of voice command detection.""" + + segmenter = VoiceCommandSegmenter( + silence_seconds=1.0, reset_seconds=0.5, speech_seconds=1.0 + ) + + # silence + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # not enough speech to start voice command + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.in_command + + # silence should reset speech detection + assert segmenter.process(_ONE_SECOND, False) + assert not segmenter.in_command + + # not enough speech to start voice command + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.in_command + + # exactly enough speech now + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.in_command + + +def test_timeout() -> None: + """Test that voice command detection times out.""" + + segmenter = VoiceCommandSegmenter(timeout_seconds=1.0) + + # not enough to time out + assert not segmenter.timed_out + assert segmenter.process(_ONE_SECOND * 0.5, False) + assert not segmenter.timed_out + + # enough to time out + assert not segmenter.process(_ONE_SECOND * 0.5, True) + assert segmenter.timed_out + + # flag resets with more audio + assert segmenter.process(_ONE_SECOND * 0.5, True) + assert not segmenter.timed_out + + assert not segmenter.process(_ONE_SECOND * 0.5, False) + assert segmenter.timed_out + + +def test_command_seconds() -> None: + """Test minimum number of seconds for voice command.""" + + segmenter = VoiceCommandSegmenter( + command_seconds=3, speech_seconds=1, silence_seconds=1, reset_seconds=1 + ) + + assert segmenter.process(_ONE_SECOND, True) + + # Silence counts towards total command length + assert segmenter.process(_ONE_SECOND * 0.5, False) + + # Enough to finish command now + assert segmenter.process(_ONE_SECOND, True) + assert segmenter.process(_ONE_SECOND * 0.5, False) + + # Silence to finish + assert not segmenter.process(_ONE_SECOND * 0.5, False) diff --git a/tests/components/assist_pipeline/test_websocket.py b/tests/components/assist_pipeline/test_websocket.py index 2da914f4252..e339ee74fbb 100644 --- a/tests/components/assist_pipeline/test_websocket.py +++ b/tests/components/assist_pipeline/test_websocket.py @@ -682,7 +682,7 @@ async def test_stt_provider_missing( ) -> None: """Test events from a pipeline run with a non-existent STT provider.""" with patch( - "homeassistant.components.stt.async_get_provider", + "homeassistant.components.stt.async_get_speech_to_text_entity", 
return_value=None, ): client = await hass_ws_client(hass) @@ -708,11 +708,11 @@ async def test_stt_provider_bad_metadata( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, init_components, - mock_stt_provider, + mock_stt_provider_entity, snapshot: SnapshotAssertion, ) -> None: """Test events from a pipeline run with wrong metadata.""" - with patch.object(mock_stt_provider, "check_metadata", return_value=False): + with patch.object(mock_stt_provider_entity, "check_metadata", return_value=False): client = await hass_ws_client(hass) await client.send_json_auto_id( @@ -743,7 +743,7 @@ async def test_stt_stream_failed( client = await hass_ws_client(hass) with patch( - "tests.components.assist_pipeline.conftest.MockSttProvider.async_process_audio_stream", + "tests.components.assist_pipeline.conftest.MockSTTProviderEntity.async_process_audio_stream", side_effect=RuntimeError, ): await client.send_json_auto_id( @@ -1188,7 +1188,7 @@ async def test_get_pipeline( "id": ANY, "language": "en", "name": "Home Assistant", - "stt_engine": "test", + "stt_engine": "stt.mock_stt", "stt_language": "en-US", "tts_engine": "test", "tts_language": "en-US", @@ -1213,7 +1213,7 @@ async def test_get_pipeline( "language": "en", "name": "Home Assistant", # It found these defaults - "stt_engine": "test", + "stt_engine": "stt.mock_stt", "stt_language": "en-US", "tts_engine": "test", "tts_language": "en-US", @@ -1297,7 +1297,7 @@ async def test_list_pipelines( "id": ANY, "language": "en", "name": "Home Assistant", - "stt_engine": "test", + "stt_engine": "stt.mock_stt", "stt_language": "en-US", "tts_engine": "test", "tts_language": "en-US", diff --git a/tests/components/asterisk_mbox/__init__.py b/tests/components/asterisk_mbox/__init__.py deleted file mode 100644 index 79e3675ad07..00000000000 --- a/tests/components/asterisk_mbox/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Tests for the asterisk component.""" diff --git a/tests/components/asterisk_mbox/const.py b/tests/components/asterisk_mbox/const.py deleted file mode 100644 index 945c6b28d30..00000000000 --- a/tests/components/asterisk_mbox/const.py +++ /dev/null @@ -1,12 +0,0 @@ -"""Asterisk tests constants.""" - -from homeassistant.components.asterisk_mbox import DOMAIN -from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT - -CONFIG = { - DOMAIN: { - CONF_HOST: "localhost", - CONF_PASSWORD: "password", - CONF_PORT: 1234, - } -} diff --git a/tests/components/asterisk_mbox/test_init.py b/tests/components/asterisk_mbox/test_init.py deleted file mode 100644 index d7567ea3286..00000000000 --- a/tests/components/asterisk_mbox/test_init.py +++ /dev/null @@ -1,36 +0,0 @@ -"""Test mailbox.""" - -from collections.abc import Generator -from unittest.mock import Mock, patch - -import pytest - -from homeassistant.components.asterisk_mbox import DOMAIN -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir -from homeassistant.setup import async_setup_component - -from .const import CONFIG - - -@pytest.fixture -def client() -> Generator[Mock]: - """Mock client.""" - with patch( - "homeassistant.components.asterisk_mbox.asteriskClient", autospec=True - ) as client: - yield client - - -async def test_repair_issue_is_created( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - client: Mock, -) -> None: - """Test repair issue is created.""" - assert await async_setup_component(hass, DOMAIN, CONFIG) - await hass.async_block_till_done() - assert ( - DOMAIN, - "deprecated_integration", - ) in 
issue_registry.issues diff --git a/tests/components/august/mocks.py b/tests/components/august/mocks.py index a0f5b55a607..43cc4957445 100644 --- a/tests/components/august/mocks.py +++ b/tests/components/august/mocks.py @@ -18,6 +18,7 @@ from yalexs.activity import ( ACTIVITY_ACTIONS_LOCK_OPERATION, SOURCE_LOCK_OPERATE, SOURCE_LOG, + Activity, BridgeOperationActivity, DoorbellDingActivity, DoorbellMotionActivity, @@ -65,8 +66,8 @@ def _timetoken(): @patch("yalexs.manager.gateway.ApiAsync") @patch("yalexs.manager.gateway.AuthenticatorAsync.async_authenticate") async def _mock_setup_august( - hass, api_instance, pubnub_mock, authenticate_mock, api_mock, brand -): + hass: HomeAssistant, api_instance, pubnub_mock, authenticate_mock, api_mock, brand +) -> MockConfigEntry: """Set up august integration.""" authenticate_mock.side_effect = MagicMock( return_value=_mock_august_authentication( @@ -81,10 +82,7 @@ async def _mock_setup_august( ) entry.add_to_hass(hass) with ( - patch( - "yalexs.manager.data.async_create_pubnub", - return_value=AsyncMock(), - ), + patch.object(pubnub_mock, "run"), patch("yalexs.manager.data.AugustPubNub", return_value=pubnub_mock), ): assert await hass.config_entries.async_setup(entry.entry_id) @@ -107,13 +105,13 @@ async def _create_august_with_devices( async def _create_august_api_with_devices( - hass, - devices, - api_call_side_effects=None, - activities=None, - pubnub=None, - brand=Brand.AUGUST, -): + hass: HomeAssistant, + devices: Iterable[LockDetail | DoorbellDetail], + api_call_side_effects: dict[str, Any] | None = None, + activities: list[Any] | None = None, + pubnub: AugustPubNub | None = None, + brand: Brand = Brand.AUGUST, +) -> tuple[MockConfigEntry, MagicMock]: if api_call_side_effects is None: api_call_side_effects = {} if pubnub is None: @@ -215,7 +213,10 @@ async def _create_august_api_with_devices( async def _mock_setup_august_with_api_side_effects( - hass, api_call_side_effects, pubnub, brand=Brand.AUGUST + hass: HomeAssistant, + api_call_side_effects: dict[str, Any], + pubnub: AugustPubNub, + brand: Brand = Brand.AUGUST, ): api_instance = MagicMock(name="Api", brand=brand) @@ -335,19 +336,21 @@ def _mock_august_lock_data(lockid="mocklockid1", houseid="mockhouseid1"): } -async def _mock_operative_august_lock_detail(hass): +async def _mock_operative_august_lock_detail(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online.json") -async def _mock_lock_with_offline_key(hass): +async def _mock_lock_with_offline_key(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online_with_keys.json") -async def _mock_inoperative_august_lock_detail(hass): +async def _mock_inoperative_august_lock_detail(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.offline.json") -async def _mock_activities_from_fixture(hass, path): +async def _mock_activities_from_fixture( + hass: HomeAssistant, path: str +) -> list[Activity]: json_dict = await _load_json_fixture(hass, path) activities = [] for activity_json in json_dict: @@ -358,32 +361,32 @@ async def _mock_activities_from_fixture(hass, path): return activities -async def _mock_lock_from_fixture(hass, path): +async def _mock_lock_from_fixture(hass: HomeAssistant, path: str) -> LockDetail: json_dict = await _load_json_fixture(hass, path) return LockDetail(json_dict) -async def _mock_doorbell_from_fixture(hass, path): +async def _mock_doorbell_from_fixture(hass: HomeAssistant, path: str) -> DoorbellDetail: json_dict 
= await _load_json_fixture(hass, path) return DoorbellDetail(json_dict) -async def _load_json_fixture(hass, path): +async def _load_json_fixture(hass: HomeAssistant, path: str) -> Any: fixture = await hass.async_add_executor_job( load_fixture, os.path.join("august", path) ) return json.loads(fixture) -async def _mock_doorsense_enabled_august_lock_detail(hass): +async def _mock_doorsense_enabled_august_lock_detail(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online_with_doorsense.json") -async def _mock_doorsense_missing_august_lock_detail(hass): +async def _mock_doorsense_missing_august_lock_detail(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online_missing_doorsense.json") -async def _mock_lock_with_unlatch(hass): +async def _mock_lock_with_unlatch(hass: HomeAssistant) -> LockDetail: return await _mock_lock_from_fixture(hass, "get_lock.online_with_unlatch.json") @@ -411,7 +414,7 @@ def _mock_door_operation_activity(lock, action, offset): ) -def _activity_from_dict(activity_dict): +def _activity_from_dict(activity_dict: dict[str, Any]) -> Activity | None: action = activity_dict.get("action") activity_dict["dateTime"] = time.time() * 1000 diff --git a/tests/components/august/snapshots/test_binary_sensor.ambr b/tests/components/august/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..6e95b0ce552 --- /dev/null +++ b/tests/components/august/snapshots/test_binary_sensor.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_doorbell_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'tmt100_name', + 'config_entries': , + 'configuration_url': 'https://account.august.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'august', + 'tmt100', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'August Home Inc.', + 'model': 'hydra1', + 'model_id': None, + 'name': 'tmt100 Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'tmt100 Name', + 'sw_version': '3.1.0-HYDRC75+201909251139', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/august/snapshots/test_lock.ambr b/tests/components/august/snapshots/test_lock.ambr new file mode 100644 index 00000000000..6aad3a140ca --- /dev/null +++ b/tests/components/august/snapshots/test_lock.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_lock_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'online_with_doorsense_name', + 'config_entries': , + 'configuration_url': 'https://account.august.com', + 'connections': set({ + tuple( + 'bluetooth', + '12:22', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'august', + 'online_with_doorsense', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'August Home Inc.', + 'model': 'AUG-MD01', + 'model_id': None, + 'name': 'online_with_doorsense Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'online_with_doorsense Name', + 'sw_version': 'undefined-4.3.0-1.8.14', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/august/test_binary_sensor.py b/tests/components/august/test_binary_sensor.py index 33d582de8d8..4ae300ae56b 100644 --- a/tests/components/august/test_binary_sensor.py +++ b/tests/components/august/test_binary_sensor.py @@ -1,8 
+1,10 @@ """The binary_sensor tests for the august platform.""" import datetime -from unittest.mock import Mock, patch +from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion from yalexs.pubnub_async import AugustPubNub from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN @@ -36,28 +38,20 @@ async def test_doorsense(hass: HomeAssistant) -> None: hass, "get_lock.online_with_doorsense.json" ) await _create_august_with_devices(hass, [lock_one]) + states = hass.states - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF ) - assert binary_sensor_online_with_doorsense_name.state == STATE_OFF async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -69,113 +63,82 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: hass, "get_activity.bridge_offline.json" ) await _create_august_with_devices(hass, [lock_one], activities=activities) - - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + states = hass.states + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state + == STATE_UNAVAILABLE ) - assert binary_sensor_online_with_doorsense_name.state == STATE_UNAVAILABLE async def test_create_doorbell(hass: HomeAssistant) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_august_with_devices(hass, [doorbell_one]) + states = hass.states - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF - binary_sensor_k98gidt45gul_name_online = hass.states.get( - "binary_sensor.k98gidt45gul_name_connectivity" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_online.state == 
STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" - ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF async def test_create_doorbell_offline(hass: HomeAssistant) -> None: """Test creation of a doorbell that is offline.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) + states = hass.states - binary_sensor_tmt100_name_motion = hass.states.get( - "binary_sensor.tmt100_name_motion" + assert states.get("binary_sensor.tmt100_name_motion").state == STATE_UNAVAILABLE + assert states.get("binary_sensor.tmt100_name_connectivity").state == STATE_OFF + assert ( + states.get("binary_sensor.tmt100_name_doorbell_ding").state == STATE_UNAVAILABLE ) - assert binary_sensor_tmt100_name_motion.state == STATE_UNAVAILABLE - binary_sensor_tmt100_name_online = hass.states.get( - "binary_sensor.tmt100_name_connectivity" - ) - assert binary_sensor_tmt100_name_online.state == STATE_OFF - binary_sensor_tmt100_name_ding = hass.states.get( - "binary_sensor.tmt100_name_doorbell_ding" - ) - assert binary_sensor_tmt100_name_ding.state == STATE_UNAVAILABLE -async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: +async def test_create_doorbell_with_motion( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") activities = await _mock_activities_from_fixture( hass, "get_activity.doorbell_motion.json" ) await _create_august_with_devices(hass, [doorbell_one], activities=activities) + states = hass.states - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON - binary_sensor_k98gidt45gul_name_online = hass.states.get( - "binary_sensor.k98gidt45gul_name_connectivity" - ) - assert binary_sensor_k98gidt45gul_name_online.state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF -async def 
test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: +async def test_doorbell_update_via_pubnub( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell that can be updated via pubnub.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") pubnub = AugustPubNub() await _create_august_with_devices(hass, [doorbell_one], pubnub=pubnub) assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" - - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_OFF - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF pubnub.message( pubnub, @@ -198,10 +161,7 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" - ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_ON pubnub.message( pubnub, @@ -235,29 +195,19 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_motion = hass.states.get( - "binary_sensor.k98gidt45gul_name_motion" - ) - assert binary_sensor_k98gidt45gul_name_motion.state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_image_capture = hass.states.get( - "binary_sensor.k98gidt45gul_name_image_capture" + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_image_capture.state == STATE_OFF pubnub.message( pubnub, @@ -271,37 +221,25 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" - ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_ON - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + assert 
states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_ON + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() - binary_sensor_k98gidt45gul_name_ding = hass.states.get( - "binary_sensor.k98gidt45gul_name_doorbell_ding" + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF ) - assert binary_sensor_k98gidt45gul_name_ding.state == STATE_OFF async def test_doorbell_device_registry( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion ) -> None: """Test creation of a lock with doorsense and bridge ands up in the registry.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) reg_device = device_registry.async_get_device(identifiers={("august", "tmt100")}) - assert reg_device.model == "hydra1" - assert reg_device.name == "tmt100 Name" - assert reg_device.manufacturer == "August Home Inc." - assert reg_device.sw_version == "3.1.0-HYDRC75+201909251139" + assert reg_device == snapshot async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: @@ -314,11 +252,9 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: config_entry = await _create_august_with_devices( hass, [lock_one], activities=activities, pubnub=pubnub ) + states = hass.states - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON pubnub.message( pubnub, @@ -330,10 +266,9 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF ) - assert binary_sensor_online_with_doorsense_name.state == STATE_OFF pubnub.message( pubnub, @@ -344,33 +279,22 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: ), ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON pubnub.connected = True async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON # Ensure pubnub status is always preserved async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() - 
binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON pubnub.message( pubnub, @@ -381,17 +305,11 @@ async def test_door_sense_update_via_pubnub(hass: HomeAssistant) -> None: ), ) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() - binary_sensor_online_with_doorsense_name = hass.states.get( - "binary_sensor.online_with_doorsense_name_door" - ) - assert binary_sensor_online_with_doorsense_name.state == STATE_ON + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() @@ -402,7 +320,10 @@ async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") await _create_august_with_devices(hass, [lock_one]) - ding_sensor = hass.states.get( - "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" + states = hass.states + assert ( + states.get( + "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" + ).state + == STATE_OFF ) - assert ding_sensor.state == STATE_OFF diff --git a/tests/components/august/test_button.py b/tests/components/august/test_button.py index 8ae2bc8a70d..948b59b2286 100644 --- a/tests/components/august/test_button.py +++ b/tests/components/august/test_button.py @@ -20,5 +20,4 @@ async def test_wake_lock(hass: HomeAssistant) -> None: await hass.services.async_call( BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True ) - await hass.async_block_till_done() api_instance.async_status_async.assert_called_once() diff --git a/tests/components/august/test_camera.py b/tests/components/august/test_camera.py index 539a26cc30f..5ab7d49c3b8 100644 --- a/tests/components/august/test_camera.py +++ b/tests/components/august/test_camera.py @@ -25,14 +25,10 @@ async def test_create_doorbell( ): await _create_august_with_devices(hass, [doorbell_one], brand=Brand.AUGUST) - camera_k98gidt45gul_name_camera = hass.states.get( - "camera.k98gidt45gul_name_camera" - ) - assert camera_k98gidt45gul_name_camera.state == STATE_IDLE + camera_state = hass.states.get("camera.k98gidt45gul_name_camera") + assert camera_state.state == STATE_IDLE - url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ - "entity_picture" - ] + url = camera_state.attributes["entity_picture"] client = await hass_client_no_auth() resp = await client.get(url) diff --git a/tests/components/august/test_config_flow.py b/tests/components/august/test_config_flow.py index fdebb8d5c46..b3138342b8c 100644 --- a/tests/components/august/test_config_flow.py +++ b/tests/components/august/test_config_flow.py @@ -5,7 +5,6 @@ from unittest.mock import patch from yalexs.authenticator_common import ValidationResult from yalexs.manager.exceptions import CannotConnect, InvalidAuth, RequireValidation -from homeassistant import config_entries from homeassistant.components.august.const import ( 
CONF_ACCESS_TOKEN_CACHE_FILE, CONF_BRAND, @@ -14,6 +13,7 @@ from homeassistant.components.august.const import ( DOMAIN, VERIFICATION_CODE_KEY, ) +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_TIMEOUT, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -25,7 +25,7 @@ async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -66,7 +66,7 @@ async def test_form(hass: HomeAssistant) -> None: async def test_form_invalid_auth(hass: HomeAssistant) -> None: """Test we handle invalid auth.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) with patch( @@ -90,7 +90,7 @@ async def test_form_invalid_auth(hass: HomeAssistant) -> None: async def test_user_unexpected_exception(hass: HomeAssistant) -> None: """Test we handle an unexpected exception.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) with patch( @@ -115,7 +115,7 @@ async def test_user_unexpected_exception(hass: HomeAssistant) -> None: async def test_form_cannot_connect(hass: HomeAssistant) -> None: """Test we handle cannot connect error.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) with patch( @@ -138,7 +138,7 @@ async def test_form_cannot_connect(hass: HomeAssistant) -> None: async def test_form_needs_validate(hass: HomeAssistant) -> None: """Test we present validation when we need to validate.""" result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) with ( @@ -248,9 +248,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -294,9 +292,7 @@ async def test_form_reauth_with_2fa(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -371,7 +367,7 @@ async def test_switching_brands(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -389,7 +385,7 @@ async def test_switching_brands(hass: HomeAssistant) -> None: result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { - CONF_BRAND: "yale_home", + CONF_BRAND: "yale_access", CONF_LOGIN_METHOD: "email", CONF_USERNAME: "my@email.tld", CONF_PASSWORD: "test-password", @@ -400,4 +396,4 @@ async def 
test_switching_brands(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "reauth_successful" assert len(mock_setup_entry.mock_calls) == 1 - assert entry.data[CONF_BRAND] == "yale_home" + assert entry.data[CONF_BRAND] == "yale_access" diff --git a/tests/components/august/test_event.py b/tests/components/august/test_event.py index 61b7560f462..0bb482c5b89 100644 --- a/tests/components/august/test_event.py +++ b/tests/components/august/test_event.py @@ -1,13 +1,12 @@ """The event tests for the august.""" -import datetime -from unittest.mock import Mock, patch +from unittest.mock import Mock +from freezegun.api import FrozenDateTimeFactory from yalexs.pubnub_async import AugustPubNub from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant -import homeassistant.util.dt as dt_util from .mocks import ( _create_august_with_devices, @@ -45,7 +44,9 @@ async def test_create_doorbell_offline(hass: HomeAssistant) -> None: assert doorbell_state.state == STATE_UNAVAILABLE -async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: +async def test_create_doorbell_with_motion( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") activities = await _mock_activities_from_fixture( @@ -61,19 +62,16 @@ async def test_create_doorbell_with_motion(hass: HomeAssistant) -> None: assert doorbell_state is not None assert doorbell_state.state == STATE_UNKNOWN - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() motion_state = hass.states.get("event.k98gidt45gul_name_motion") assert motion_state.state == isotime -async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: +async def test_doorbell_update_via_pubnub( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: """Test creation of a doorbell that can be updated via pubnub.""" doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") pubnub = AugustPubNub() @@ -125,14 +123,9 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: assert motion_state.state != STATE_UNKNOWN isotime = motion_state.state - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() motion_state = hass.states.get("event.k98gidt45gul_name_motion") assert motion_state is not None @@ -155,14 +148,9 @@ async def test_doorbell_update_via_pubnub(hass: HomeAssistant) -> None: assert doorbell_state.state != STATE_UNKNOWN isotime = motion_state.state - new_time = dt_util.utcnow() + datetime.timedelta(seconds=40) - native_time = datetime.datetime.now() + datetime.timedelta(seconds=40) - with patch( - "homeassistant.components.august.util._native_datetime", - return_value=native_time, - ): - 
async_fire_time_changed(hass, new_time) - await hass.async_block_till_done() + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") assert doorbell_state is not None diff --git a/tests/components/august/test_gateway.py b/tests/components/august/test_gateway.py index 74266397ed5..1603aeb3ecb 100644 --- a/tests/components/august/test_gateway.py +++ b/tests/components/august/test_gateway.py @@ -22,14 +22,14 @@ async def test_refresh_access_token(hass: HomeAssistant) -> None: @patch("yalexs.manager.gateway.AuthenticatorAsync.should_refresh") @patch("yalexs.manager.gateway.AuthenticatorAsync.async_refresh_access_token") async def _patched_refresh_access_token( - hass, - new_token, - new_token_expire_time, + hass: HomeAssistant, + new_token: str, + new_token_expire_time: int, refresh_access_token_mock, should_refresh_mock, authenticate_mock, async_get_operable_locks_mock, -): +) -> None: authenticate_mock.side_effect = MagicMock( return_value=_mock_august_authentication( "original_token", 1234, AuthenticationState.AUTHENTICATED diff --git a/tests/components/august/test_init.py b/tests/components/august/test_init.py index 8261e32d668..1bbe8033ec8 100644 --- a/tests/components/august/test_init.py +++ b/tests/components/august/test_init.py @@ -5,6 +5,7 @@ from unittest.mock import Mock, patch from aiohttp import ClientResponseError import pytest from yalexs.authenticator_common import AuthenticationState +from yalexs.const import Brand from yalexs.exceptions import AugustApiAIOHTTPError from homeassistant.components.august.const import DOMAIN @@ -20,7 +21,11 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError -from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers import ( + device_registry as dr, + entity_registry as er, + issue_registry as ir, +) from homeassistant.setup import async_setup_component from .mocks import ( @@ -122,16 +127,16 @@ async def test_unlock_throws_august_api_http_error(hass: HomeAssistant) -> None: "unlock_return_activities": _unlock_return_activities_side_effect }, ) - last_err = None data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - try: + + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - except HomeAssistantError as err: - last_err = err - assert str(last_err) == ( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ) async def test_lock_throws_august_api_http_error(hass: HomeAssistant) -> None: @@ -152,16 +157,15 @@ async def test_lock_throws_august_api_http_error(hass: HomeAssistant) -> None: "lock_return_activities": _lock_return_activities_side_effect }, ) - last_err = None data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} - try: + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - except HomeAssistantError as err: - last_err = err - assert str(last_err) == ( - "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" - " consumable" - ) async def 
test_open_throws_hass_service_not_supported_error( @@ -371,6 +375,7 @@ async def test_load_unload(hass: HomeAssistant) -> None: await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED async def test_load_triggers_ble_discovery( @@ -420,3 +425,24 @@ async def test_device_remove_devices( ) response = await client.remove_device(dead_device_entry.id, config_entry.entry_id) assert response["success"] + + +async def test_brand_migration_issue(hass: HomeAssistant) -> None: + """Test creating and removing the brand migration issue.""" + august_operative_lock = await _mock_operative_august_lock_detail(hass) + config_entry = await _create_august_with_devices( + hass, [august_operative_lock], brand=Brand.YALE_HOME + ) + + assert config_entry.state is ConfigEntryState.LOADED + + issue_reg = ir.async_get(hass) + issue_entry = issue_reg.async_get_issue(DOMAIN, "yale_brand_migration") + assert issue_entry + assert issue_entry.severity == ir.IssueSeverity.CRITICAL + assert issue_entry.translation_placeholders == { + "migrate_url": "https://my.home-assistant.io/redirect/config_flow_start?domain=yale" + } + + await hass.config_entries.async_remove(config_entry.entry_id) + assert not issue_reg.async_get_issue(DOMAIN, "yale_brand_migration") diff --git a/tests/components/august/test_lock.py b/tests/components/august/test_lock.py index 8bb71826d24..e786cebf3e1 100644 --- a/tests/components/august/test_lock.py +++ b/tests/components/august/test_lock.py @@ -6,6 +6,7 @@ from unittest.mock import Mock from aiohttp import ClientResponseError from freezegun.api import FrozenDateTimeFactory import pytest +from syrupy import SnapshotAssertion from yalexs.manager.activity import INITIAL_LOCK_RESYNC_TIME from yalexs.pubnub_async import AugustPubNub @@ -43,7 +44,7 @@ from tests.common import async_fire_time_changed async def test_lock_device_registry( - hass: HomeAssistant, device_registry: dr.DeviceRegistry + hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion ) -> None: """Test creation of a lock with doorsense and bridge ands up in the registry.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) @@ -52,10 +53,7 @@ async def test_lock_device_registry( reg_device = device_registry.async_get_device( identifiers={("august", "online_with_doorsense")} ) - assert reg_device.model == "AUG-MD01" - assert reg_device.sw_version == "undefined-4.3.0-1.8.14" - assert reg_device.name == "online_with_doorsense Name" - assert reg_device.manufacturer == "August Home Inc." 
+ assert reg_device == snapshot async def test_lock_changed_by(hass: HomeAssistant) -> None: @@ -65,14 +63,10 @@ async def test_lock_changed_by(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED - - assert ( - lock_online_with_doorsense_name.attributes.get("changed_by") - == "Your favorite elven princess" - ) + assert lock_state.state == STATE_LOCKED + assert lock_state.attributes["changed_by"] == "Your favorite elven princess" async def test_state_locking(hass: HomeAssistant) -> None: @@ -82,9 +76,7 @@ async def test_state_locking(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.locking.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_LOCKING async def test_state_unlocking(hass: HomeAssistant) -> None: @@ -96,9 +88,7 @@ async def test_state_unlocking(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING async def test_state_jammed(hass: HomeAssistant) -> None: @@ -108,9 +98,7 @@ async def test_state_jammed(hass: HomeAssistant) -> None: activities = await _mock_activities_from_fixture(hass, "get_activity.jammed.json") await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_JAMMED + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_JAMMED async def test_one_lock_operation( @@ -119,35 +107,27 @@ async def test_one_lock_operation( """Test creation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) await _create_august_with_devices(hass, [lock_one]) + states = hass.states - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == STATE_LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = 
states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED # No activity means it will be unavailable until the activity feed has data lock_operator_sensor = entity_registry.async_get( @@ -155,8 +135,7 @@ async def test_one_lock_operation( ) assert lock_operator_sensor assert ( - hass.states.get("sensor.online_with_doorsense_name_operator").state - == STATE_UNKNOWN + states.get("sensor.online_with_doorsense_name_operator").state == STATE_UNKNOWN ) @@ -170,7 +149,6 @@ async def test_open_lock_operation(hass: HomeAssistant) -> None: data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - await hass.async_block_till_done() lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") assert lock_online_with_unlatch_name.state == STATE_UNLOCKED @@ -189,12 +167,10 @@ async def test_open_lock_operation_pubnub_connected( await _create_august_with_devices(hass, [lock_with_unlatch], pubnub=pubnub) pubnub.connected = True - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_LOCKED + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_LOCKED data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) - await hass.async_block_till_done() pubnub.message( pubnub, @@ -209,8 +185,7 @@ async def test_open_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_unlatch_name = hass.states.get("lock.online_with_unlatch_name") - assert lock_online_with_unlatch_name.state == STATE_UNLOCKED + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_UNLOCKED await hass.async_block_till_done() @@ -227,19 +202,15 @@ async def test_one_lock_operation_pubnub_connected( await _create_august_with_devices(hass, [lock_one], pubnub=pubnub) pubnub.connected = True - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == STATE_LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() pubnub.message( pubnub, @@ -254,17 +225,13 @@ async def 
test_one_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) - await hass.async_block_till_done() pubnub.message( pubnub, @@ -279,8 +246,8 @@ async def test_one_lock_operation_pubnub_connected( await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED # No activity means it will be unavailable until the activity feed has data lock_operator_sensor = entity_registry.async_get( @@ -306,8 +273,8 @@ async def test_one_lock_operation_pubnub_connected( ) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED async def test_lock_jammed(hass: HomeAssistant) -> None: @@ -325,22 +292,18 @@ async def test_lock_jammed(hass: HomeAssistant) -> None: }, ) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == STATE_LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) - await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_JAMMED + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_JAMMED async def test_lock_throws_exception_on_unknown_status_code( @@ -360,15 +323,12 @@ async def test_lock_throws_exception_on_unknown_status_code( }, ) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + lock_state = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert lock_state.state == STATE_LOCKED - assert lock_online_with_doorsense_name.attributes.get("battery_level") == 92 - assert ( - lock_online_with_doorsense_name.attributes.get("friendly_name") - == "online_with_doorsense Name" - ) + assert lock_state.attributes["battery_level"] == 
92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} with pytest.raises(ClientResponseError): @@ -383,9 +343,7 @@ async def test_one_lock_unknown_state(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one]) - lock_brokenid_name = hass.states.get("lock.brokenid_name") - - assert lock_brokenid_name.state == STATE_UNKNOWN + assert hass.states.get("lock.brokenid_name").state == STATE_UNKNOWN async def test_lock_bridge_offline(hass: HomeAssistant) -> None: @@ -397,9 +355,7 @@ async def test_lock_bridge_offline(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_UNAVAILABLE + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_UNAVAILABLE async def test_lock_bridge_online(hass: HomeAssistant) -> None: @@ -411,14 +367,13 @@ async def test_lock_bridge_online(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [lock_one], activities=activities) - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_LOCKED async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: """Test creation of a lock with doorsense and bridge.""" lock_one = await _mock_doorsense_enabled_august_lock_detail(hass) + states = hass.states assert lock_one.pubsub_channel == "pubsub" pubnub = AugustPubNub() @@ -428,9 +383,7 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: ) pubnub.connected = True - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - - assert lock_online_with_doorsense_name.state == STATE_LOCKED + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED pubnub.message( pubnub, @@ -446,8 +399,7 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING pubnub.message( pubnub, @@ -463,25 +415,21 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_LOCKING pubnub.connected = True async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert 
states.get("lock.online_with_doorsense_name").state == STATE_LOCKING # Ensure pubnub status is always preserved async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_LOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING pubnub.message( pubnub, @@ -496,13 +444,11 @@ async def test_lock_update_via_pubnub(hass: HomeAssistant) -> None: await hass.async_block_till_done() await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) await hass.async_block_till_done() - lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") - assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING await hass.config_entries.async_unload(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/august/test_sensor.py b/tests/components/august/test_sensor.py index 67223e9dff0..2d72d287ce3 100644 --- a/tests/components/august/test_sensor.py +++ b/tests/components/august/test_sensor.py @@ -28,13 +28,9 @@ async def test_create_doorbell(hass: HomeAssistant) -> None: doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") await _create_august_with_devices(hass, [doorbell_one]) - sensor_k98gidt45gul_name_battery = hass.states.get( - "sensor.k98gidt45gul_name_battery" - ) - assert sensor_k98gidt45gul_name_battery.state == "96" - assert ( - sensor_k98gidt45gul_name_battery.attributes["unit_of_measurement"] == PERCENTAGE - ) + battery_state = hass.states.get("sensor.k98gidt45gul_name_battery") + assert battery_state.state == "96" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE async def test_create_doorbell_offline( @@ -44,9 +40,9 @@ async def test_create_doorbell_offline( doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") await _create_august_with_devices(hass, [doorbell_one]) - sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") - assert sensor_tmt100_name_battery.state == "81" - assert sensor_tmt100_name_battery.attributes["unit_of_measurement"] == PERCENTAGE + battery_state = hass.states.get("sensor.tmt100_name_battery") + assert battery_state.state == "81" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE entry = entity_registry.async_get("sensor.tmt100_name_battery") assert entry @@ -60,8 +56,7 @@ async def test_create_doorbell_hardwired(hass: HomeAssistant) -> None: ) await _create_august_with_devices(hass, [doorbell_one]) - sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") - assert sensor_tmt100_name_battery is None + assert hass.states.get("sensor.tmt100_name_battery") is None async def test_create_lock_with_linked_keypad( @@ -71,25 +66,21 @@ async def test_create_lock_with_linked_keypad( lock_one = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json") await _create_august_with_devices(hass, [lock_one]) - sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( + battery_state = 
hass.states.get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) - assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" - assert ( - sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ - "unit_of_measurement" - ] - == PERCENTAGE - ) + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) assert entry assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "62" - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + keypad_battery_state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert keypad_battery_state.state == "62" + assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" @@ -101,42 +92,32 @@ async def test_create_lock_with_low_battery_linked_keypad( """Test creation of a lock with a linked keypad that both have a battery.""" lock_one = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json") await _create_august_with_devices(hass, [lock_one]) + states = hass.states - sensor_a6697750d607098bae8d6baa11ef8063_name_battery = hass.states.get( - "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" - ) - assert sensor_a6697750d607098bae8d6baa11ef8063_name_battery.state == "88" - assert ( - sensor_a6697750d607098bae8d6baa11ef8063_name_battery.attributes[ - "unit_of_measurement" - ] - == PERCENTAGE - ) + battery_state = states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_battery") + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" ) assert entry assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" - state = hass.states.get("sensor.front_door_lock_keypad_battery") - assert state.state == "10" - assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + keypad_battery_state = states.get("sensor.front_door_lock_keypad_battery") + assert keypad_battery_state.state == "10" + assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") assert entry assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" # No activity means it will be unavailable until someone unlocks/locks it - lock_operator_sensor = entity_registry.async_get( + operator_entry = entity_registry.async_get( "sensor.a6697750d607098bae8d6baa11ef8063_name_operator" ) - assert ( - lock_operator_sensor.unique_id - == "A6697750D607098BAE8D6BAA11EF8063_lock_operator" - ) - assert ( - hass.states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator").state - == STATE_UNKNOWN - ) + assert operator_entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_lock_operator" + + operator_state = states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator") + assert operator_state.state == STATE_UNKNOWN async def test_lock_operator_bluetooth( diff --git a/tests/components/aussie_broadband/test_config_flow.py b/tests/components/aussie_broadband/test_config_flow.py index 6ee674ab0f4..76e96c5cc02 100644 --- 
a/tests/components/aussie_broadband/test_config_flow.py +++ b/tests/components/aussie_broadband/test_config_flow.py @@ -13,6 +13,8 @@ from homeassistant.data_entry_flow import FlowResultType from .common import FAKE_DATA, FAKE_SERVICES +from tests.common import MockConfigEntry + TEST_USERNAME = FAKE_DATA[CONF_USERNAME] TEST_PASSWORD = FAKE_DATA[CONF_PASSWORD] @@ -163,41 +165,15 @@ async def test_form_network_issue(hass: HomeAssistant) -> None: async def test_reauth(hass: HomeAssistant) -> None: """Test reauth flow.""" - - # Test reauth but the entry doesn't exist - result1 = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=FAKE_DATA + mock_entry = MockConfigEntry( + domain=DOMAIN, + data=FAKE_DATA, + unique_id=FAKE_DATA[CONF_USERNAME], ) - - with ( - patch("aussiebb.asyncio.AussieBB.__init__", return_value=None), - patch("aussiebb.asyncio.AussieBB.login", return_value=True), - patch( - "aussiebb.asyncio.AussieBB.get_services", return_value=[FAKE_SERVICES[0]] - ), - patch( - "homeassistant.components.aussie_broadband.async_setup_entry", - return_value=True, - ), - ): - result2 = await hass.config_entries.flow.async_configure( - result1["flow_id"], - { - CONF_PASSWORD: TEST_PASSWORD, - }, - ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.CREATE_ENTRY - assert result2["title"] == TEST_USERNAME - assert result2["data"] == FAKE_DATA + mock_entry.add_to_hass(hass) # Test failed reauth - result5 = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FAKE_DATA, - ) + result5 = await mock_entry.start_reauth_flow(hass) assert result5["step_id"] == "reauth_confirm" with ( diff --git a/tests/components/autarco/snapshots/test_sensor.ambr b/tests/components/autarco/snapshots/test_sensor.ambr index 2ff0236a59f..0aa093d6a6d 100644 --- a/tests/components/autarco/snapshots/test_sensor.ambr +++ b/tests/components/autarco/snapshots/test_sensor.ambr @@ -401,405 +401,3 @@ 'state': '200', }) # --- -# name: test_solar_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy AC output total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_energy_total', - 'unique_id': 'test-serial-1_out_ac_energy_total', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_1_energy_ac_output_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Inverter test-serial-1 Energy AC output total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_1_energy_ac_output_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10379', - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_1_power_ac_output-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': 
None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power AC output', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_power', - 'unique_id': 'test-serial-1_out_ac_power', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_1_power_ac_output-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter test-serial-1 Power AC output', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_1_power_ac_output', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '200', - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy AC output total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_energy_total', - 'unique_id': 'test-serial-2_out_ac_energy_total', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_2_energy_ac_output_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Inverter test-serial-2 Energy AC output total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_2_energy_ac_output_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10379', - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_2_power_ac_output-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power AC output', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'out_ac_power', - 'unique_id': 'test-serial-2_out_ac_power', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.inverter_test_serial_2_power_ac_output-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Inverter test-serial-2 Power AC output', - 'state_class': , - 
'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.inverter_test_serial_2_power_ac_output', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '500', - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_energy_production_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy production month', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_production_month', - 'unique_id': '1_solar_energy_production_month', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Solar Energy production month', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_energy_production_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '58', - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_today-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_energy_production_today', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy production today', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_production_today', - 'unique_id': '1_solar_energy_production_today', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_today-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Solar Energy production today', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_energy_production_today', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4', - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_energy_production_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Energy production total', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'energy_production_total', - 'unique_id': '1_solar_energy_production_total', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.solar_energy_production_total-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'Solar Energy production total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_energy_production_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10379', - }) -# --- -# name: test_solar_sensors[sensor.solar_power_production-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solar_power_production', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power production', - 'platform': 'autarco', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_production', - 'unique_id': '1_solar_power_production', - 'unit_of_measurement': , - }) -# --- -# name: test_solar_sensors[sensor.solar_power_production-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'Solar Power production', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solar_power_production', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '200', - }) -# --- diff --git a/tests/components/auth/test_init.py b/tests/components/auth/test_init.py index 0f4908c2fc0..718bb369b53 100644 --- a/tests/components/auth/test_init.py +++ b/tests/components/auth/test_init.py @@ -13,6 +13,7 @@ from homeassistant.auth.models import ( TOKEN_TYPE_LONG_LIVED_ACCESS_TOKEN, TOKEN_TYPE_NORMAL, Credentials, + RefreshToken, ) from homeassistant.components import auth from homeassistant.core import HomeAssistant @@ -37,7 +38,7 @@ def mock_credential(): ) -async def async_setup_user_refresh_token(hass): +async def async_setup_user_refresh_token(hass: HomeAssistant) -> RefreshToken: """Create a testing user with a connected credential.""" user = await hass.auth.async_create_user("Test User") diff --git a/tests/components/auth/test_init_link_user.py b/tests/components/auth/test_init_link_user.py index d1a5fa51af2..a8f04c2720d 100644 --- a/tests/components/auth/test_init_link_user.py +++ b/tests/components/auth/test_init_link_user.py @@ -1,6 +1,7 @@ """Tests for the link user flow.""" from http import HTTPStatus +from typing import Any from unittest.mock import patch from homeassistant.core import HomeAssistant @@ -11,7 +12,9 @@ from tests.common import CLIENT_ID, CLIENT_REDIRECT_URI from tests.typing import ClientSessionGenerator -async def async_get_code(hass, aiohttp_client): +async def async_get_code( + hass: HomeAssistant, aiohttp_client: ClientSessionGenerator +) -> dict[str, Any]: """Return authorization code for link user tests.""" config = [ { diff --git a/tests/components/awair/test_config_flow.py b/tests/components/awair/test_config_flow.py index ab9f5faa425..ac17cf41448 100644 --- a/tests/components/awair/test_config_flow.py +++ b/tests/components/awair/test_config_flow.py @@ -7,7 +7,7 @@ from aiohttp.client_exceptions import ClientConnectorError from python_awair.exceptions import AuthError, AwairError from homeassistant.components.awair.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, SOURCE_ZEROCONF 
+from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_ACCESS_TOKEN, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -136,11 +136,7 @@ async def test_reauth(hass: HomeAssistant, user, cloud_devices) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": CLOUD_UNIQUE_ID}, - data={**CLOUD_CONFIG, CONF_ACCESS_TOKEN: "blah"}, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -180,11 +176,7 @@ async def test_reauth_error(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": CLOUD_UNIQUE_ID}, - data={**CLOUD_CONFIG, CONF_ACCESS_TOKEN: "blah"}, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/aws/test_init.py b/tests/components/aws/test_init.py index 9589ad6c037..820b08e51b4 100644 --- a/tests/components/aws/test_init.py +++ b/tests/components/aws/test_init.py @@ -1,6 +1,7 @@ """Tests for the aws component config and setup.""" import json +from typing import Any from unittest.mock import AsyncMock, MagicMock, call, patch as async_patch from homeassistant.core import HomeAssistant @@ -10,7 +11,7 @@ from homeassistant.setup import async_setup_component class MockAioSession: """Mock AioSession.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init a mock session.""" self.get_user = AsyncMock() self.invoke = AsyncMock() diff --git a/tests/components/axis/snapshots/test_binary_sensor.ambr b/tests/components/axis/snapshots/test_binary_sensor.ambr index 94b1cc2fc2e..ab860489d55 100644 --- a/tests/components/axis/snapshots/test_binary_sensor.ambr +++ b/tests/components/axis/snapshots/test_binary_sensor.ambr @@ -1,79 +1,4 @@ # serializer version: 1 -# name: test_binary_sensors[event0-binary_sensor.name_daynight_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'light', - 'friendly_name': 'name DayNight 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_daynight_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event0-daynight_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'light', - 'friendly_name': 'name DayNight 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_daynight_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event0-daynight_1][binary_sensor.home_daynight_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_daynight_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'DayNight 1', - 'platform': 'axis', - 'previous_unique_id': 
None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:VideoSource/tnsaxis:DayNightVision-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event0-daynight_1][binary_sensor.home_daynight_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'light', - 'friendly_name': 'home DayNight 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_daynight_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event0][binary_sensor.home_daynight_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -121,156 +46,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event1-binary_sensor.name_sound_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'sound', - 'friendly_name': 'name Sound 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_sound_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event1-sound_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'sound', - 'friendly_name': 'name Sound 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_sound_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event1-sound_1][binary_sensor.home_sound_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_sound_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Sound 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:AudioSource/tnsaxis:TriggerLevel-1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event1-sound_1][binary_sensor.home_sound_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'sound', - 'friendly_name': 'home Sound 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_sound_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event10-binary_sensor.name_object_analytics_device1scenario8] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Object Analytics Device1Scenario8', - }), - 'context': , - 'entity_id': 'binary_sensor.name_object_analytics_device1scenario8', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event10-object_analytics_device1scenario8] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Object Analytics Device1Scenario8', - }), - 'context': , - 'entity_id': 'binary_sensor.name_object_analytics_device1scenario8', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event10-object_analytics_device1scenario8][binary_sensor.home_object_analytics_device1scenario8-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Object Analytics Device1Scenario8', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario8-Device1Scenario8', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event10-object_analytics_device1scenario8][binary_sensor.home_object_analytics_device1scenario8-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Object Analytics Device1Scenario8', - }), - 'context': , - 'entity_id': 'binary_sensor.home_object_analytics_device1scenario8', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event10][binary_sensor.home_object_analytics_device1scenario8-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -365,81 +140,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensors[event2-binary_sensor.name_pir_sensor] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'name PIR sensor', - }), - 'context': , - 'entity_id': 'binary_sensor.name_pir_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event2-pir_sensor] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'name PIR sensor', - }), - 'context': , - 'entity_id': 'binary_sensor.name_pir_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event2-pir_sensor][binary_sensor.home_pir_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_pir_sensor', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PIR sensor', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:IO/Port-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event2-pir_sensor][binary_sensor.home_pir_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'home PIR sensor', - }), - 'context': , - 'entity_id': 'binary_sensor.home_pir_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensors[event2][binary_sensor.home_pir_sensor-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -487,81 +187,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensors[event3-binary_sensor.name_pir_0] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 
'friendly_name': 'name PIR 0', - }), - 'context': , - 'entity_id': 'binary_sensor.name_pir_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event3-pir_0] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name PIR 0', - }), - 'context': , - 'entity_id': 'binary_sensor.name_pir_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensors[event3-pir_0][binary_sensor.home_pir_0-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_pir_0', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'PIR 0', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tns1:Device/tnsaxis:Sensor/PIR-0', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event3-pir_0][binary_sensor.home_pir_0-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home PIR 0', - }), - 'context': , - 'entity_id': 'binary_sensor.home_pir_0', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_binary_sensors[event3][binary_sensor.home_pir_0-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -609,81 +234,6 @@ 'state': 'off', }) # --- -# name: test_binary_sensors[event4-binary_sensor.name_fence_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Fence Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_fence_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event4-fence_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Fence Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_fence_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event4-fence_guard_profile_1][binary_sensor.home_fence_guard_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_fence_guard_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Fence Guard Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/FenceGuard/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event4-fence_guard_profile_1][binary_sensor.home_fence_guard_profile_1-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Fence Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_fence_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event4][binary_sensor.home_fence_guard_profile_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -731,81 +281,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event5-binary_sensor.name_motion_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Motion Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_motion_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event5-motion_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Motion Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_motion_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event5-motion_guard_profile_1][binary_sensor.home_motion_guard_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_motion_guard_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Motion Guard Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/MotionGuard/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event5-motion_guard_profile_1][binary_sensor.home_motion_guard_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Motion Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_motion_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event5][binary_sensor.home_motion_guard_profile_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -853,81 +328,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event6-binary_sensor.name_loitering_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Loitering Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_loitering_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event6-loitering_guard_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Loitering Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_loitering_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_binary_sensors[event6-loitering_guard_profile_1][binary_sensor.home_loitering_guard_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Loitering Guard Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/LoiteringGuard/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event6-loitering_guard_profile_1][binary_sensor.home_loitering_guard_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Loitering Guard Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_loitering_guard_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event6][binary_sensor.home_loitering_guard_profile_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -975,81 +375,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event7-binary_sensor.name_vmd4_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name VMD4 Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_vmd4_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event7-vmd4_profile_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name VMD4 Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_vmd4_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event7-vmd4_profile_1][binary_sensor.home_vmd4_profile_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_vmd4_profile_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VMD4 Profile 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile1-Camera1Profile1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event7-vmd4_profile_1][binary_sensor.home_vmd4_profile_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home VMD4 Profile 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_vmd4_profile_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event7][binary_sensor.home_vmd4_profile_1-entry] 
EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1097,81 +422,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event8-binary_sensor.name_object_analytics_scenario_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Object Analytics Scenario 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_object_analytics_scenario_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event8-object_analytics_scenario_1] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name Object Analytics Scenario 1', - }), - 'context': , - 'entity_id': 'binary_sensor.name_object_analytics_scenario_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event8-object_analytics_scenario_1][binary_sensor.home_object_analytics_scenario_1-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Object Analytics Scenario 1', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/ObjectAnalytics/Device1Scenario1-Device1Scenario1', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event8-object_analytics_scenario_1][binary_sensor.home_object_analytics_scenario_1-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home Object Analytics Scenario 1', - }), - 'context': , - 'entity_id': 'binary_sensor.home_object_analytics_scenario_1', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event8][binary_sensor.home_object_analytics_scenario_1-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1219,81 +469,6 @@ 'state': 'on', }) # --- -# name: test_binary_sensors[event9-binary_sensor.name_vmd4_camera1profile9] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name VMD4 Camera1Profile9', - }), - 'context': , - 'entity_id': 'binary_sensor.name_vmd4_camera1profile9', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event9-vmd4_camera1profile9] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'name VMD4 Camera1Profile9', - }), - 'context': , - 'entity_id': 'binary_sensor.name_vmd4_camera1profile9', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensors[event9-vmd4_camera1profile9][binary_sensor.home_vmd4_camera1profile9-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', - 
'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'VMD4 Camera1Profile9', - 'platform': 'axis', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:40:8c:12:34:56-tnsaxis:CameraApplicationPlatform/VMD/Camera1Profile9-Camera1Profile9', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensors[event9-vmd4_camera1profile9][binary_sensor.home_vmd4_camera1profile9-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'motion', - 'friendly_name': 'home VMD4 Camera1Profile9', - }), - 'context': , - 'entity_id': 'binary_sensor.home_vmd4_camera1profile9', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensors[event9][binary_sensor.home_vmd4_camera1profile9-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/axis/test_config_flow.py b/tests/components/axis/test_config_flow.py index 5ceb6588fbd..8591b4583c1 100644 --- a/tests/components/axis/test_config_flow.py +++ b/tests/components/axis/test_config_flow.py @@ -17,7 +17,6 @@ from homeassistant.components.axis.const import ( ) from homeassistant.config_entries import ( SOURCE_DHCP, - SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER, @@ -205,12 +204,7 @@ async def test_reauth_flow_update_configuration( assert config_entry_setup.data[CONF_USERNAME] == "root" assert config_entry_setup.data[CONF_PASSWORD] == "pass" - result = await hass.config_entries.flow.async_init( - AXIS_DOMAIN, - context={"source": SOURCE_REAUTH}, - data=config_entry_setup.data, - ) - + result = await config_entry_setup.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/azure_devops/__init__.py b/tests/components/azure_devops/__init__.py index d636a6fda6d..cc4732b1495 100644 --- a/tests/components/azure_devops/__init__.py +++ b/tests/components/azure_devops/__init__.py @@ -2,7 +2,7 @@ from typing import Final -from aioazuredevops.models.builds import Build, BuildDefinition +from aioazuredevops.models.build import Build, BuildDefinition from aioazuredevops.models.core import Project from homeassistant.components.azure_devops.const import CONF_ORG, CONF_PAT, CONF_PROJECT diff --git a/tests/components/azure_devops/snapshots/test_sensor.ambr b/tests/components/azure_devops/snapshots/test_sensor.ambr index 0ce82cae1e8..aa8d1d9e7e0 100644 --- a/tests/components/azure_devops/snapshots/test_sensor.ambr +++ b/tests/components/azure_devops/snapshots/test_sensor.ambr @@ -1,467 +1,4 @@ # serializer version: 1 -# name: test_sensors[sensor.testproject_ci_build_finish_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_finish_time', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CI build finish time', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'finish_time', - 'unique_id': 'testorg_1234_9876_finish_time', - 
'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_finish_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build finish time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_finish_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T00:00:00+00:00', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_id-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_id', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build id', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'build_id', - 'unique_id': 'testorg_1234_9876_build_id', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_id-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build id', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_id', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5678', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_queue_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_queue_time', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CI build queue time', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'queue_time', - 'unique_id': 'testorg_1234_9876_queue_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_queue_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build queue time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_queue_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T00:00:00+00:00', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_reason-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_reason', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build reason', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'reason', - 'unique_id': 'testorg_1234_9876_reason', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_sensors[sensor.testproject_ci_build_reason-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build reason', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_reason', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'manual', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_result-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_result', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build result', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'result', - 'unique_id': 'testorg_1234_9876_result', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_result-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build result', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_result', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'succeeded', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_branch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_source_branch', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build source branch', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'source_branch', - 'unique_id': 'testorg_1234_9876_source_branch', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_branch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source branch', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_branch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'main', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_version-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_source_version', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build source version', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'source_version', - 'unique_id': 'testorg_1234_9876_source_version', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_source_version-state] - StateSnapshot({ - 'attributes': 
ReadOnlyDict({ - 'friendly_name': 'testproject CI build source version', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '123', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_start_time-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_start_time', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'CI build start time', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'start_time', - 'unique_id': 'testorg_1234_9876_start_time', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_start_time-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build start time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_start_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-01-01T00:00:00+00:00', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_status', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build status', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'status', - 'unique_id': 'testorg_1234_9876_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'completed', - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_url-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_build_url', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI build url', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'url', - 'unique_id': 'testorg_1234_9876_url', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_build_url-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build url', - }), - 'context': , - 'entity_id': 
'sensor.testproject_ci_build_url', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensors[sensor.testproject_ci_latest_build-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -891,52 +428,6 @@ 'state': '2021-01-01T00:00:00+00:00', }) # --- -# name: test_sensors[sensor.testproject_ci_latest_build_status-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_ci_latest_build_status', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CI latest build status', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'status', - 'unique_id': 'testorg_1234_9876_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_ci_latest_build_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_latest_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'completed', - }) -# --- # name: test_sensors[sensor.testproject_ci_latest_build_url-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -983,243 +474,6 @@ 'state': 'unknown', }) # --- -# name: test_sensors[sensor.testproject_test_build_build_id-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_test_build_build_id', - 'has_entity_name': True, - 'hidden_by': , - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Test Build build id', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'build_id', - 'unique_id': 'testorg_1234_9876_build_id', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_test_build_build_id-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject Test Build build id', - }), - 'context': , - 'entity_id': 'sensor.testproject_test_build_build_id', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '5678', - }) -# --- -# name: test_sensors[sensor.testproject_test_build_latest_build-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.testproject_test_build_latest_build', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Test Build latest build', - 'platform': 'azure_devops', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'latest_build', - 'unique_id': 
'testorg_1234_9876_latest_build', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.testproject_test_build_latest_build-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'definition_id': 9876, - 'definition_name': 'Test Build', - 'finish_time': '2021-01-01T00:00:00Z', - 'friendly_name': 'testproject Test Build latest build', - 'id': 5678, - 'queue_time': '2021-01-01T00:00:00Z', - 'reason': 'manual', - 'result': 'succeeded', - 'source_branch': 'main', - 'source_version': '123', - 'start_time': '2021-01-01T00:00:00Z', - 'status': 'completed', - 'url': None, - }), - 'context': , - 'entity_id': 'sensor.testproject_test_build_latest_build', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_finish_time-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build finish time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_finish_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_id-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build id', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_id', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '6789', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_queue_time-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 'friendly_name': 'testproject CI build queue time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_queue_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_reason-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build reason', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_reason', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_result-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build result', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_result', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_source_branch-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source branch', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_branch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_source_version-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build source version', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_source_version', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_start_time-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'date', - 
'friendly_name': 'testproject CI build start time', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_start_time', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_status-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_sensors_missing_data[sensor.testproject_ci_build_url-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI build url', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_build_url', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensors_missing_data[sensor.testproject_ci_latest_build-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -1352,19 +606,6 @@ 'state': 'unknown', }) # --- -# name: test_sensors_missing_data[sensor.testproject_ci_latest_build_status-state-missing-data] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'testproject CI latest build status', - }), - 'context': , - 'entity_id': 'sensor.testproject_ci_latest_build_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensors_missing_data[sensor.testproject_ci_latest_build_url-state-missing-data] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/azure_devops/test_config_flow.py b/tests/components/azure_devops/test_config_flow.py index 45dc10802b9..9ebc9991939 100644 --- a/tests/components/azure_devops/test_config_flow.py +++ b/tests/components/azure_devops/test_config_flow.py @@ -53,18 +53,14 @@ async def test_authorization_error( async def test_reauth_authorization_error( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps authorization error.""" mock_devops_client.authorize.return_value = False mock_devops_client.authorized = False - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" @@ -108,17 +104,14 @@ async def test_connection_error( async def test_reauth_connection_error( hass: HomeAssistant, + mock_config_entry: MockConfigEntry, mock_devops_client: AsyncMock, ) -> None: """Test we show user form on Azure DevOps connection error.""" mock_devops_client.authorize.side_effect = aiohttp.ClientError mock_devops_client.authorized = False - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" @@ -174,11 +167,7 @@ async def test_reauth_project_error( mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert 
result["step_id"] == "reauth" @@ -205,11 +194,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" diff --git a/tests/components/baf/__init__.py b/tests/components/baf/__init__.py index f1074a87cee..a047029f9a0 100644 --- a/tests/components/baf/__init__.py +++ b/tests/components/baf/__init__.py @@ -12,7 +12,7 @@ class MockBAFDevice(Device): """A simple mock for a BAF Device.""" # pylint: disable-next=super-init-not-called - def __init__(self, async_wait_available_side_effect=None): + def __init__(self, async_wait_available_side_effect=None) -> None: """Init simple mock.""" self._async_wait_available_side_effect = async_wait_available_side_effect diff --git a/tests/components/bang_olufsen/conftest.py b/tests/components/bang_olufsen/conftest.py index 4764798f34d..dd6c4a73469 100644 --- a/tests/components/bang_olufsen/conftest.py +++ b/tests/components/bang_olufsen/conftest.py @@ -36,7 +36,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(): +def mock_config_entry() -> MockConfigEntry: """Mock config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -47,7 +47,11 @@ def mock_config_entry(): @pytest.fixture -async def mock_media_player(hass: HomeAssistant, mock_config_entry, mock_mozart_client): +async def mock_media_player( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, +) -> None: """Mock media_player entity.""" mock_config_entry.add_to_hass(hass) @@ -241,14 +245,17 @@ def mock_mozart_client() -> Generator[AsyncMock]: # Non-REST API client methods client.check_device_connection = AsyncMock() client.close_api_client = AsyncMock() + + # WebSocket listener client.connect_notifications = AsyncMock() client.disconnect_notifications = Mock() + client.websocket_connected = False yield client @pytest.fixture -def mock_setup_entry(): +def mock_setup_entry() -> Generator[AsyncMock]: """Mock successful setup entry.""" with patch( "homeassistant.components.bang_olufsen.async_setup_entry", return_value=True diff --git a/tests/components/bang_olufsen/test_config_flow.py b/tests/components/bang_olufsen/test_config_flow.py index e637120a6ae..5d5f34a79e6 100644 --- a/tests/components/bang_olufsen/test_config_flow.py +++ b/tests/components/bang_olufsen/test_config_flow.py @@ -1,6 +1,6 @@ """Test the bang_olufsen config_flow.""" -from unittest.mock import Mock +from unittest.mock import AsyncMock, Mock from aiohttp.client_exceptions import ClientConnectorError from mozart_api.exceptions import ApiException @@ -25,7 +25,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_config_flow_timeout_error( - hass: HomeAssistant, mock_mozart_client + hass: HomeAssistant, mock_mozart_client: AsyncMock ) -> None: """Test we handle timeout_error.""" mock_mozart_client.get_beolink_self.side_effect = TimeoutError() @@ -42,7 +42,7 @@ async def test_config_flow_timeout_error( async def test_config_flow_client_connector_error( - hass: HomeAssistant, mock_mozart_client + hass: HomeAssistant, mock_mozart_client: AsyncMock ) -> None: """Test we handle client_connector_error.""" mock_mozart_client.get_beolink_self.side_effect = ClientConnectorError( @@ -73,7 +73,7 @@ async def test_config_flow_invalid_ip(hass: HomeAssistant) -> 
None: async def test_config_flow_api_exception( - hass: HomeAssistant, mock_mozart_client + hass: HomeAssistant, mock_mozart_client: AsyncMock ) -> None: """Test we handle api_exception.""" mock_mozart_client.get_beolink_self.side_effect = ApiException() @@ -89,7 +89,7 @@ async def test_config_flow_api_exception( assert mock_mozart_client.get_beolink_self.call_count == 1 -async def test_config_flow(hass: HomeAssistant, mock_mozart_client) -> None: +async def test_config_flow(hass: HomeAssistant, mock_mozart_client: AsyncMock) -> None: """Test config flow.""" result_init = await hass.config_entries.flow.async_init( @@ -112,7 +112,9 @@ async def test_config_flow(hass: HomeAssistant, mock_mozart_client) -> None: assert mock_mozart_client.get_beolink_self.call_count == 1 -async def test_config_flow_zeroconf(hass: HomeAssistant, mock_mozart_client) -> None: +async def test_config_flow_zeroconf( + hass: HomeAssistant, mock_mozart_client: AsyncMock +) -> None: """Test zeroconf discovery.""" result_zeroconf = await hass.config_entries.flow.async_init( @@ -162,7 +164,7 @@ async def test_config_flow_zeroconf_ipv6(hass: HomeAssistant) -> None: async def test_config_flow_zeroconf_invalid_ip( - hass: HomeAssistant, mock_mozart_client + hass: HomeAssistant, mock_mozart_client: AsyncMock ) -> None: """Test zeroconf discovery with invalid IP address.""" mock_mozart_client.get_beolink_self.side_effect = ClientConnectorError( diff --git a/tests/components/bang_olufsen/test_init.py b/tests/components/bang_olufsen/test_init.py index 11742b846ae..3eb98e956be 100644 --- a/tests/components/bang_olufsen/test_init.py +++ b/tests/components/bang_olufsen/test_init.py @@ -1,5 +1,7 @@ """Test the bang_olufsen __init__.""" +from unittest.mock import AsyncMock + from aiohttp.client_exceptions import ServerTimeoutError from homeassistant.components.bang_olufsen import DOMAIN @@ -9,12 +11,14 @@ from homeassistant.helpers.device_registry import DeviceRegistry from .const import TEST_MODEL_BALANCE, TEST_NAME, TEST_SERIAL_NUMBER +from tests.common import MockConfigEntry + async def test_setup_entry( hass: HomeAssistant, - mock_config_entry, - mock_mozart_client, device_registry: DeviceRegistry, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, ) -> None: """Test async_setup_entry.""" @@ -41,7 +45,9 @@ async def test_setup_entry( async def test_setup_entry_failed( - hass: HomeAssistant, mock_config_entry, mock_mozart_client + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, ) -> None: """Test failed async_setup_entry.""" @@ -66,7 +72,9 @@ async def test_setup_entry_failed( async def test_unload_entry( - hass: HomeAssistant, mock_config_entry, mock_mozart_client + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, ) -> None: """Test unload_entry.""" diff --git a/tests/components/bang_olufsen/test_media_player.py b/tests/components/bang_olufsen/test_media_player.py index 74867a8eedf..9928a626a4f 100644 --- a/tests/components/bang_olufsen/test_media_player.py +++ b/tests/components/bang_olufsen/test_media_player.py @@ -1,16 +1,22 @@ """Test the Bang & Olufsen media_player entity.""" +from collections.abc import Callable from contextlib import nullcontext as does_not_raise -from unittest.mock import ANY, patch +import logging +from unittest.mock import AsyncMock, patch -from mozart_api.models import PlaybackContentMetadata +from mozart_api.models import ( + PlaybackContentMetadata, + RenderingState, + Source, + 
WebsocketNotificationTag, +) import pytest from homeassistant.components.bang_olufsen.const import ( BANG_OLUFSEN_STATES, DOMAIN, BangOlufsenSource, - WebsocketNotification, ) from homeassistant.components.media_player import ( ATTR_INPUT_SOURCE, @@ -36,7 +42,6 @@ from homeassistant.components.media_player import ( from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError -from homeassistant.helpers.dispatcher import async_dispatcher_send from homeassistant.setup import async_setup_component from .const import ( @@ -57,7 +62,6 @@ from .const import ( TEST_PLAYBACK_STATE_TURN_OFF, TEST_RADIO_STATION, TEST_SEEK_POSITION_HOME_ASSISTANT_FORMAT, - TEST_SERIAL_NUMBER, TEST_SOURCES, TEST_VIDEO_SOURCES, TEST_VOLUME, @@ -71,21 +75,21 @@ from tests.typing import WebSocketGenerator async def test_initialization( - hass: HomeAssistant, mock_config_entry: MockConfigEntry, mock_mozart_client + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, ) -> None: """Test the integration is initialized properly in _initialize, async_added_to_hass and __init__.""" + caplog.set_level(logging.DEBUG) + # Setup entity - with patch( - "homeassistant.components.bang_olufsen.media_player._LOGGER.debug" - ) as mock_logger: - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) # Ensure that the logger has been called with the debug message - mock_logger.assert_called_once_with( - "Connected to: %s %s running SW %s", "Beosound Balance", "11111111", "1.0.0" - ) + assert "Connected to: Beosound Balance 11111111 running SW 1.0.0" in caplog.text # Check state (The initial state in this test does not contain all that much. # States are tested using simulated WebSocket events.) 
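The media_player hunks that follow stop dispatching fake WebSocket events with async_dispatcher_send; instead they pull the notification callback that the entity registered on the mocked Mozart client out of the mock's call_args and invoke it directly. A standalone sketch of that capture-and-fire pattern using plain unittest.mock (the client, the listener registration, and the payload below are stand-ins, not the real mozart_api API):

from unittest.mock import Mock

client = Mock()

# Code under test registers its listener on the (mocked) client, e.g.:
client.get_volume_notifications(lambda volume: print("volume notification:", volume))

# The test recovers that listener from the mock's recorded call and fires it,
# simulating an incoming WebSocket notification:
volume_callback = client.get_volume_notifications.call_args[0][0]
volume_callback({"level": 40})  # illustrative payload

The same hunks also drop the patched _LOGGER.debug assertion in favour of pytest's caplog fixture: caplog.set_level(logging.DEBUG) before setup, then an "in caplog.text" check on the rendered message.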
@@ -101,7 +105,9 @@ async def test_initialization( async def test_async_update_sources_audio_only( - hass: HomeAssistant, mock_config_entry, mock_mozart_client + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, ) -> None: """Test sources are correctly handled in _async_update_sources.""" mock_mozart_client.get_remote_menu.return_value = {} @@ -114,7 +120,9 @@ async def test_async_update_sources_audio_only( async def test_async_update_sources_outdated_api( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test fallback sources are correctly handled in _async_update_sources.""" mock_mozart_client.get_available_sources.side_effect = ValueError() @@ -129,14 +137,43 @@ async def test_async_update_sources_outdated_api( ) +async def test_async_update_sources_remote( + hass: HomeAssistant, mock_mozart_client, mock_config_entry: MockConfigEntry +) -> None: + """Test _async_update_sources is called when there are new video sources.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + notification_callback = mock_mozart_client.get_notification_notifications.call_args[ + 0 + ][0] + + # This is not an ideal check, but I couldn't get anything else to work + assert mock_mozart_client.get_available_sources.call_count == 1 + assert mock_mozart_client.get_remote_menu.call_count == 1 + + # Send the remote menu Websocket event + notification_callback(WebsocketNotificationTag(value="remoteMenuChanged")) + + assert mock_mozart_client.get_available_sources.call_count == 2 + assert mock_mozart_client.get_remote_menu.call_count == 2 + + async def test_async_update_playback_metadata( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_playback_metadata.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_metadata_callback = ( + mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] + ) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_MEDIA_DURATION not in states.attributes assert ATTR_MEDIA_TITLE not in states.attributes @@ -146,11 +183,7 @@ async def test_async_update_playback_metadata( assert ATTR_MEDIA_CHANNEL not in states.attributes # Send the WebSocket event dispatch - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_METADATA}", - TEST_PLAYBACK_METADATA, - ) + playback_metadata_callback(TEST_PLAYBACK_METADATA) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ( @@ -167,47 +200,49 @@ async def test_async_update_playback_metadata( async def test_async_update_playback_error( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_playback_error.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - # The async_dispatcher_send function seems to swallow exceptions, making pytest.raises unusable - with patch("homeassistant.helpers.dispatcher._LOGGER.error") as mock_logger: - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_ERROR}", - TEST_PLAYBACK_ERROR, - ) + 
playback_error_callback = ( + mock_mozart_client.get_playback_error_notifications.call_args[0][0] + ) - # The traceback can't be tested, so it is replaced with "ANY" - mock_logger.assert_called_once_with( - "%s\n%s", - "Exception in _async_update_playback_error when dispatching '11111111_playback_error': (PlaybackError(error='Test error', item=None),)", - ANY, + # The async_dispatcher_send function seems to swallow exceptions, making pytest.raises unusable + playback_error_callback(TEST_PLAYBACK_ERROR) + + assert ( + "Exception in _async_update_playback_error when dispatching '11111111_playback_error': (PlaybackError(error='Test error', item=None),)" + in caplog.text ) async def test_async_update_playback_progress( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_playback_progress.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_progress_callback = ( + mock_mozart_client.get_playback_progress_notifications.call_args[0][0] + ) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_MEDIA_POSITION not in states.attributes old_updated_at = states.attributes[ATTR_MEDIA_POSITION_UPDATED_AT] assert old_updated_at - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_PROGRESS}", - TEST_PLAYBACK_PROGRESS, - ) + playback_progress_callback(TEST_PLAYBACK_PROGRESS) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.attributes[ATTR_MEDIA_POSITION] == TEST_PLAYBACK_PROGRESS.progress @@ -217,21 +252,23 @@ async def test_async_update_playback_progress( async def test_async_update_playback_state( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test _async_update_playback_state.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_state_callback = ( + mock_mozart_client.get_playback_state_notifications.call_args[0][0] + ) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == MediaPlayerState.PLAYING - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", - TEST_PLAYBACK_STATE_PAUSED, - ) + playback_state_callback(TEST_PLAYBACK_STATE_PAUSED) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == TEST_PLAYBACK_STATE_PAUSED.value @@ -292,43 +329,40 @@ async def test_async_update_playback_state( ], ) async def test_async_update_source_change( - reported_source, - real_source, - content_type, - progress, - metadata, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + reported_source: Source, + real_source: Source, + content_type: MediaType, + progress: int, + metadata: PlaybackContentMetadata, ) -> None: """Test _async_update_source_change.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_progress_callback = ( + mock_mozart_client.get_playback_progress_notifications.call_args[0][0] + ) + playback_metadata_callback = ( + mock_mozart_client.get_playback_metadata_notifications.call_args[0][0] + ) + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] + ) + states = 
hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_INPUT_SOURCE not in states.attributes assert states.attributes[ATTR_MEDIA_CONTENT_TYPE] == MediaType.MUSIC # Simulate progress attribute being available - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_PROGRESS}", - TEST_PLAYBACK_PROGRESS, - ) + playback_progress_callback(TEST_PLAYBACK_PROGRESS) # Simulate metadata - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_METADATA}", - metadata, - ) - - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.SOURCE_CHANGE}", - reported_source, - ) + playback_metadata_callback(metadata) + source_change_callback(reported_source) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.attributes[ATTR_INPUT_SOURCE] == real_source.name @@ -337,13 +371,19 @@ async def test_async_update_source_change( async def test_async_turn_off( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_turn_off.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + playback_state_callback = ( + mock_mozart_client.get_playback_state_notifications.call_args[0][0] + ) + await hass.services.async_call( "media_player", "turn_off", @@ -351,11 +391,7 @@ async def test_async_turn_off( blocking=True, ) - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", - TEST_PLAYBACK_STATE_TURN_OFF, - ) + playback_state_callback(TEST_PLAYBACK_STATE_TURN_OFF) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_TURN_OFF.value] @@ -365,13 +401,17 @@ async def test_async_turn_off( async def test_async_set_volume_level( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_set_volume_level and _async_update_volume by proxy.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_MEDIA_VOLUME_LEVEL not in states.attributes @@ -386,11 +426,7 @@ async def test_async_set_volume_level( ) # The service call will trigger a WebSocket notification - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", - TEST_VOLUME, - ) + volume_callback(TEST_VOLUME) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ( @@ -403,13 +439,17 @@ async def test_async_set_volume_level( async def test_async_mute_volume( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_mute_volume.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) + volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ATTR_MEDIA_VOLUME_MUTED not in states.attributes @@ -424,11 +464,7 @@ async def test_async_mute_volume( ) # The service call will trigger a WebSocket notification - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", - 
TEST_VOLUME_MUTED, - ) + volume_callback(TEST_VOLUME_MUTED) states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert ( @@ -451,24 +487,24 @@ async def test_async_mute_volume( ], ) async def test_async_media_play_pause( - initial_state, - command, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + initial_state: RenderingState, + command: str, ) -> None: """Test async_media_play_pause.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - # Set the initial state - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", - initial_state, + playback_state_callback = ( + mock_mozart_client.get_playback_state_notifications.call_args[0][0] ) + # Set the initial state + playback_state_callback(initial_state) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == BANG_OLUFSEN_STATES[initial_state.value] @@ -483,20 +519,22 @@ async def test_async_media_play_pause( async def test_async_media_stop( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_media_stop.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - # Set the state to playing - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.PLAYBACK_STATE}", - TEST_PLAYBACK_STATE_PLAYING, + playback_state_callback = ( + mock_mozart_client.get_playback_state_notifications.call_args[0][0] ) + # Set the state to playing + playback_state_callback(TEST_PLAYBACK_STATE_PLAYING) + states = hass.states.get(TEST_MEDIA_PLAYER_ENTITY_ID) assert states.state == BANG_OLUFSEN_STATES[TEST_PLAYBACK_STATE_PLAYING.value] @@ -512,7 +550,9 @@ async def test_async_media_stop( async def test_async_media_next_track( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_media_next_track.""" @@ -539,25 +579,25 @@ async def test_async_media_next_track( ], ) async def test_async_media_seek( - source, - expected_result, - seek_called_times, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + source: Source, + expected_result: Callable, + seek_called_times: int, ) -> None: """Test async_media_seek.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - # Set the source - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.SOURCE_CHANGE}", - source, + source_change_callback = ( + mock_mozart_client.get_source_change_notifications.call_args[0][0] ) + # Set the source + source_change_callback(source) + # Check results with expected_result: await hass.services.async_call( @@ -574,7 +614,9 @@ async def test_async_media_seek( async def test_async_media_previous_track( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_media_previous_track.""" @@ -592,7 +634,9 @@ async def test_async_media_previous_track( async def test_async_clear_playlist( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + 
mock_config_entry: MockConfigEntry, ) -> None: """Test async_clear_playlist.""" @@ -621,13 +665,13 @@ async def test_async_clear_playlist( ], ) async def test_async_select_source( - source, - expected_result, - audio_source_call, - video_source_call, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + source: str, + expected_result: Callable, + audio_source_call: int, + video_source_call: int, ) -> None: """Test async_select_source with an invalid source.""" @@ -650,7 +694,9 @@ async def test_async_select_source( async def test_async_play_media_invalid_type( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media only accepts valid media types.""" @@ -675,7 +721,9 @@ async def test_async_play_media_invalid_type( async def test_async_play_media_url( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media URL.""" @@ -700,7 +748,9 @@ async def test_async_play_media_url( async def test_async_play_media_overlay_absolute_volume_uri( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media overlay with Home Assistant local URI and absolute volume.""" @@ -731,32 +781,32 @@ async def test_async_play_media_overlay_absolute_volume_uri( async def test_async_play_media_overlay_invalid_offset_volume_tts( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Home Assistant invalid offset volume and B&O tts.""" mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) - with patch( - "homeassistant.components.bang_olufsen.media_player._LOGGER.warning" - ) as mock_logger: - await hass.services.async_call( - "media_player", - "play_media", - { - ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, - ATTR_MEDIA_CONTENT_ID: "Dette er en test", - ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", - ATTR_MEDIA_ANNOUNCE: True, - ATTR_MEDIA_EXTRA: { - "overlay_offset_volume": 20, - "overlay_tts_language": "da-dk", - }, + await hass.services.async_call( + "media_player", + "play_media", + { + ATTR_ENTITY_ID: TEST_MEDIA_PLAYER_ENTITY_ID, + ATTR_MEDIA_CONTENT_ID: "Dette er en test", + ATTR_MEDIA_CONTENT_TYPE: "overlay_tts", + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: { + "overlay_offset_volume": 20, + "overlay_tts_language": "da-dk", }, - blocking=True, - ) - mock_logger.assert_called_once_with("Error setting volume") + }, + blocking=True, + ) + assert "Error setting volume" in caplog.text mock_mozart_client.post_overlay_play.assert_called_once_with( TEST_OVERLAY_INVALID_OFFSET_VOLUME_TTS @@ -764,19 +814,19 @@ async def test_async_play_media_overlay_invalid_offset_volume_tts( async def test_async_play_media_overlay_offset_volume_tts( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Home Assistant invalid offset volume and B&O tts.""" mock_config_entry.add_to_hass(hass) await 
hass.config_entries.async_setup(mock_config_entry.entry_id) + volume_callback = mock_mozart_client.get_volume_notifications.call_args[0][0] + # Set the volume to enable offset - async_dispatcher_send( - hass, - f"{TEST_SERIAL_NUMBER}_{WebsocketNotification.VOLUME}", - TEST_VOLUME, - ) + volume_callback(TEST_VOLUME) await hass.services.async_call( "media_player", @@ -797,7 +847,9 @@ async def test_async_play_media_overlay_offset_volume_tts( async def test_async_play_media_tts( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Home Assistant tts.""" @@ -821,7 +873,9 @@ async def test_async_play_media_tts( async def test_async_play_media_radio( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with B&O radio.""" @@ -845,7 +899,9 @@ async def test_async_play_media_radio( async def test_async_play_media_favourite( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with B&O favourite.""" @@ -867,7 +923,9 @@ async def test_async_play_media_favourite( async def test_async_play_media_deezer_flow( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Deezer flow.""" @@ -893,7 +951,9 @@ async def test_async_play_media_deezer_flow( async def test_async_play_media_deezer_playlist( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Deezer playlist.""" @@ -918,7 +978,9 @@ async def test_async_play_media_deezer_playlist( async def test_async_play_media_deezer_track( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with Deezer track.""" @@ -942,7 +1004,9 @@ async def test_async_play_media_deezer_track( async def test_async_play_media_invalid_deezer( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media with an invalid/no Deezer login.""" @@ -971,7 +1035,9 @@ async def test_async_play_media_invalid_deezer( async def test_async_play_media_url_m3u( - hass: HomeAssistant, mock_mozart_client, mock_config_entry + hass: HomeAssistant, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, ) -> None: """Test async_play_media URL with the m3u extension.""" @@ -1040,12 +1106,12 @@ async def test_async_play_media_url_m3u( ], ) async def test_async_browse_media( - child, - present, hass: HomeAssistant, - mock_mozart_client, - mock_config_entry, hass_ws_client: WebSocketGenerator, + mock_mozart_client: AsyncMock, + mock_config_entry: MockConfigEntry, + child: dict[str, str | bool | None], + present: bool, ) -> None: """Test async_browse_media with audio and video source.""" diff --git a/tests/components/bang_olufsen/test_websocket.py b/tests/components/bang_olufsen/test_websocket.py new file mode 100644 index 
00000000000..209550faee5 --- /dev/null +++ b/tests/components/bang_olufsen/test_websocket.py @@ -0,0 +1,163 @@ +"""Test the Bang & Olufsen WebSocket listener.""" + +import logging +from unittest.mock import AsyncMock, Mock + +from mozart_api.models import SoftwareUpdateState +import pytest + +from homeassistant.components.bang_olufsen.const import ( + BANG_OLUFSEN_WEBSOCKET_EVENT, + CONNECTION_STATUS, + DOMAIN, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers.device_registry import DeviceRegistry +from homeassistant.helpers.dispatcher import async_dispatcher_connect + +from .const import TEST_NAME + +from tests.common import MockConfigEntry + + +async def test_connection( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, +) -> None: + """Test on_connection logs and calls correctly.""" + + mock_mozart_client.websocket_connected = True + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + connection_callback = mock_mozart_client.get_on_connection.call_args[0][0] + + caplog.set_level(logging.DEBUG) + + mock_connection_callback = Mock() + + async_dispatcher_connect( + hass, + f"{mock_config_entry.unique_id}_{CONNECTION_STATUS}", + mock_connection_callback, + ) + + # Call the WebSocket connection status method + connection_callback() + await hass.async_block_till_done() + + mock_connection_callback.assert_called_once_with(True) + assert f"Connected to the {TEST_NAME} notification channel" in caplog.text + + +async def test_connection_lost( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, +) -> None: + """Test on_connection_lost logs and calls correctly.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + connection_lost_callback = mock_mozart_client.get_on_connection_lost.call_args[0][0] + + mock_connection_lost_callback = Mock() + + async_dispatcher_connect( + hass, + f"{mock_config_entry.unique_id}_{CONNECTION_STATUS}", + mock_connection_lost_callback, + ) + + connection_lost_callback() + await hass.async_block_till_done() + + mock_connection_lost_callback.assert_called_once_with(False) + assert f"Lost connection to the {TEST_NAME}" in caplog.text + + +async def test_on_software_update_state( + hass: HomeAssistant, + device_registry: DeviceRegistry, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, +) -> None: + """Test software version is updated through on_software_update_state.""" + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + + software_update_state_callback = ( + mock_mozart_client.get_software_update_state_notifications.call_args[0][0] + ) + + # Trigger the notification + await software_update_state_callback(SoftwareUpdateState()) + + await hass.async_block_till_done() + + device = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device.sw_version == "1.0.0" + + +async def test_on_all_notifications_raw( + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + device_registry: DeviceRegistry, + mock_config_entry: MockConfigEntry, + mock_mozart_client: AsyncMock, +) -> None: + """Test on_all_notifications_raw logs and fires as expected.""" + + mock_config_entry.add_to_hass(hass) + await
hass.config_entries.async_setup(mock_config_entry.entry_id) + + all_notifications_raw_callback = ( + mock_mozart_client.get_all_notifications_raw.call_args[0][0] + ) + + raw_notification = { + "eventData": { + "default": {"level": 40}, + "level": {"level": 40}, + "maximum": {"level": 100}, + "muted": {"muted": False}, + }, + "eventType": "WebSocketEventVolume", + } + raw_notification_full = raw_notification + + # Get device ID for the modified notification that is sent as an event and in the log + device = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + raw_notification_full.update( + { + "device_id": device.id, + "serial_number": mock_config_entry.unique_id, + } + ) + + caplog.set_level(logging.DEBUG) + + mock_event_callback = Mock() + + # Listen to BANG_OLUFSEN_WEBSOCKET_EVENT events + hass.bus.async_listen(BANG_OLUFSEN_WEBSOCKET_EVENT, mock_event_callback) + + # Trigger the notification + all_notifications_raw_callback(raw_notification) + await hass.async_block_till_done() + + assert str(raw_notification_full) in caplog.text + + mocked_call = mock_event_callback.call_args[0][0].as_dict() + assert mocked_call["event_type"] == BANG_OLUFSEN_WEBSOCKET_EVENT + assert mocked_call["data"] == raw_notification_full diff --git a/tests/components/blackbird/test_media_player.py b/tests/components/blackbird/test_media_player.py index ec5a37f72ad..db92dddcc77 100644 --- a/tests/components/blackbird/test_media_player.py +++ b/tests/components/blackbird/test_media_player.py @@ -35,7 +35,7 @@ class AttrDict(dict): class MockBlackbird: """Mock for pyblackbird object.""" - def __init__(self): + def __init__(self) -> None: """Init mock object.""" self.zones = defaultdict(lambda: AttrDict(power=True, av=1)) diff --git a/tests/components/blebox/conftest.py b/tests/components/blebox/conftest.py index 89229575a0b..fb35bae43a1 100644 --- a/tests/components/blebox/conftest.py +++ b/tests/components/blebox/conftest.py @@ -9,6 +9,7 @@ import pytest from homeassistant.components.blebox.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_PORT +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry @@ -77,7 +78,9 @@ def feature_fixture(request: pytest.FixtureRequest) -> Any: return request.getfixturevalue(request.param) -async def async_setup_entities(hass, entity_ids): +async def async_setup_entities( + hass: HomeAssistant, entity_ids: list[str] +) -> list[er.RegistryEntry]: """Return configured entries with the given entity ids.""" config_entry = mock_config() @@ -90,7 +93,7 @@ async def async_setup_entities(hass, entity_ids): return [entity_registry.async_get(entity_id) for entity_id in entity_ids] -async def async_setup_entity(hass, entity_id): +async def async_setup_entity(hass: HomeAssistant, entity_id: str) -> er.RegistryEntry: """Return a configured entry with the given entity_id.""" return (await async_setup_entities(hass, [entity_id]))[0] diff --git a/tests/components/blebox/test_cover.py b/tests/components/blebox/test_cover.py index 1596de134c0..1900a6d6834 100644 --- a/tests/components/blebox/test_cover.py +++ b/tests/components/blebox/test_cover.py @@ -22,7 +22,9 @@ from homeassistant.const import ( ATTR_DEVICE_CLASS, ATTR_SUPPORTED_FEATURES, SERVICE_CLOSE_COVER, + SERVICE_CLOSE_COVER_TILT, SERVICE_OPEN_COVER, + SERVICE_OPEN_COVER_TILT, SERVICE_SET_COVER_POSITION, SERVICE_SET_COVER_TILT_POSITION, SERVICE_STOP_COVER, @@ -473,3 +475,57 @@ async def 
test_set_tilt_position(shutterbox, hass: HomeAssistant) -> None: blocking=True, ) assert hass.states.get(entity_id).state == STATE_OPENING + + +async def test_open_tilt(shutterbox, hass: HomeAssistant) -> None: + """Test opening tilt.""" + feature_mock, entity_id = shutterbox + + def initial_update(): + feature_mock.tilt_current = 100 + + def set_tilt_position(tilt_position): + assert tilt_position == 0 + feature_mock.tilt_current = tilt_position + + feature_mock.async_update = AsyncMock(side_effect=initial_update) + feature_mock.async_set_tilt_position = AsyncMock(side_effect=set_tilt_position) + + await async_setup_entity(hass, entity_id) + feature_mock.async_update = AsyncMock() + + await hass.services.async_call( + "cover", + SERVICE_OPEN_COVER_TILT, + {"entity_id": entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 100 # inverted + + +async def test_close_tilt(shutterbox, hass: HomeAssistant) -> None: + """Test closing tilt.""" + feature_mock, entity_id = shutterbox + + def initial_update(): + feature_mock.tilt_current = 0 + + def set_tilt_position(tilt_position): + assert tilt_position == 100 + feature_mock.tilt_current = tilt_position + + feature_mock.async_update = AsyncMock(side_effect=initial_update) + feature_mock.async_set_tilt_position = AsyncMock(side_effect=set_tilt_position) + + await async_setup_entity(hass, entity_id) + feature_mock.async_update = AsyncMock() + + await hass.services.async_call( + "cover", + SERVICE_CLOSE_COVER_TILT, + {"entity_id": entity_id}, + blocking=True, + ) + state = hass.states.get(entity_id) + assert state.attributes[ATTR_CURRENT_TILT_POSITION] == 0 # inverted diff --git a/tests/components/blink/test_config_flow.py b/tests/components/blink/test_config_flow.py index 9c3193ec7d6..c89ab65ea1d 100644 --- a/tests/components/blink/test_config_flow.py +++ b/tests/components/blink/test_config_flow.py @@ -10,6 +10,8 @@ from homeassistant.components.blink import DOMAIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + async def test_form(hass: HomeAssistant) -> None: """Test we get the form.""" @@ -292,10 +294,11 @@ async def test_form_unknown_error(hass: HomeAssistant) -> None: async def test_reauth_shows_user_step(hass: HomeAssistant) -> None: """Test reauth shows the user form.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, + mock_entry = MockConfigEntry( + domain=DOMAIN, data={"username": "blink@example.com", "password": "invalid_password"}, ) + mock_entry.add_to_hass(hass) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/blue_current/test_config_flow.py b/tests/components/blue_current/test_config_flow.py index 33346990425..a9dea70431f 100644 --- a/tests/components/blue_current/test_config_flow.py +++ b/tests/components/blue_current/test_config_flow.py @@ -129,6 +129,11 @@ async def test_reauth( expected_api_token: str, ) -> None: """Test reauth flow.""" + config_entry.add_to_hass(hass) + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + with ( patch( "homeassistant.components.blue_current.config_flow.Client.validate_api_token", @@ -146,20 +151,6 @@ async def test_reauth( lambda self, on_data, on_open:
hass.loop.create_future(), ), ): - config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data={"api_token": "123"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"api_token": "1234567890"}, diff --git a/tests/components/bluesound/conftest.py b/tests/components/bluesound/conftest.py index 096db055b45..155d6b66e4e 100644 --- a/tests/components/bluesound/conftest.py +++ b/tests/components/bluesound/conftest.py @@ -69,7 +69,7 @@ def status() -> Status: @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.bluesound.async_setup_entry", return_value=True diff --git a/tests/components/bluetooth/__init__.py b/tests/components/bluetooth/__init__.py index eae867b96d5..8794d808718 100644 --- a/tests/components/bluetooth/__init__.py +++ b/tests/components/bluetooth/__init__.py @@ -271,7 +271,7 @@ async def _async_setup_with_adapter( class MockBleakClient(BleakClient): """Mock bleak client.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock init.""" super().__init__(*args, **kwargs) self._device_path = "/dev/test" diff --git a/tests/components/bluetooth/test_init.py b/tests/components/bluetooth/test_init.py index bd38c9cfbae..8e7d604f794 100644 --- a/tests/components/bluetooth/test_init.py +++ b/tests/components/bluetooth/test_init.py @@ -3,6 +3,7 @@ import asyncio from datetime import timedelta import time +from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from bleak import BleakError @@ -100,7 +101,7 @@ async def test_setup_and_stop_passive( init_kwargs = None class MockPassiveBleakScanner: - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs @@ -151,7 +152,7 @@ async def test_setup_and_stop_old_bluez( init_kwargs = None class MockBleakScanner: - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs diff --git a/tests/components/bluetooth/test_passive_update_processor.py b/tests/components/bluetooth/test_passive_update_processor.py index 079ac2200fc..d7a7a8ba08c 100644 --- a/tests/components/bluetooth/test_passive_update_processor.py +++ b/tests/components/bluetooth/test_passive_update_processor.py @@ -583,8 +583,7 @@ async def test_exception_from_update_method( nonlocal run_count run_count += 1 if run_count == 2: - # pylint: disable-next=broad-exception-raised - raise Exception("Test exception") + raise Exception("Test exception") # noqa: TRY002 return GENERIC_PASSIVE_BLUETOOTH_DATA_UPDATE coordinator = PassiveBluetoothProcessorCoordinator( @@ -1418,8 +1417,7 @@ async def test_exception_from_coordinator_update_method( nonlocal run_count run_count += 1 if run_count == 2: - # pylint: disable-next=broad-exception-raised - raise Exception("Test exception") + raise Exception("Test exception") # noqa: TRY002 return {"test": "data"} @callback diff --git a/tests/components/bluetooth/test_scanner.py b/tests/components/bluetooth/test_scanner.py index 
dc25f29111c..6acb86476e7 100644 --- a/tests/components/bluetooth/test_scanner.py +++ b/tests/components/bluetooth/test_scanner.py @@ -3,6 +3,7 @@ import asyncio from datetime import timedelta import time +from typing import Any from unittest.mock import ANY, MagicMock, patch from bleak import BleakError @@ -211,7 +212,7 @@ async def test_recovery_from_dbus_restart(hass: HomeAssistant) -> None: mock_discovered = [] class MockBleakScanner: - def __init__(self, detection_callback, *args, **kwargs): + def __init__(self, detection_callback, *args: Any, **kwargs: Any) -> None: nonlocal _callback _callback = detection_callback @@ -631,7 +632,7 @@ async def test_setup_and_stop_macos( init_kwargs = None class MockBleakScanner: - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the scanner.""" nonlocal init_kwargs init_kwargs = kwargs diff --git a/tests/components/bluetooth/test_wrappers.py b/tests/components/bluetooth/test_wrappers.py index 5fc3d70c97a..c5908776882 100644 --- a/tests/components/bluetooth/test_wrappers.py +++ b/tests/components/bluetooth/test_wrappers.py @@ -22,7 +22,7 @@ from homeassistant.components.bluetooth import ( HaBluetoothConnector, HomeAssistantBluetoothManager, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import CALLBACK_TYPE, HomeAssistant from . import _get_manager, generate_advertisement_data, generate_ble_device @@ -164,7 +164,11 @@ def mock_platform_client_that_raises_on_connect_fixture(): yield -def _generate_scanners_with_fake_devices(hass): +def _generate_scanners_with_fake_devices( + hass: HomeAssistant, +) -> tuple[ + dict[str, tuple[BLEDevice, AdvertisementData]], CALLBACK_TYPE, CALLBACK_TYPE +]: """Generate scanners with fake devices.""" manager = _get_manager() hci0_device_advs = {} diff --git a/tests/components/bluetooth_le_tracker/test_device_tracker.py b/tests/components/bluetooth_le_tracker/test_device_tracker.py index f183f987cde..452297e38c2 100644 --- a/tests/components/bluetooth_le_tracker/test_device_tracker.py +++ b/tests/components/bluetooth_le_tracker/test_device_tracker.py @@ -1,6 +1,7 @@ """Test Bluetooth LE device tracker.""" from datetime import timedelta +from typing import Any from unittest.mock import patch from bleak import BleakError @@ -31,7 +32,7 @@ from tests.components.bluetooth import generate_advertisement_data, generate_ble class MockBleakClient: """Mock BleakClient.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr index 8a26acd1040..2182ff2bb48 100644 --- a/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr +++ b/tests/components/bmw_connected_drive/snapshots/test_sensor.ambr @@ -929,6 +929,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -968,6 +969,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -1933,6 +1935,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -1972,6 +1975,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -2665,6 +2669,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 'standby', ]), @@ -2704,6 +2709,7 @@ 'options': list([ 'cooling', 'heating', + 'ventilation', 'inactive', 
'standby', ]), diff --git a/tests/components/bmw_connected_drive/test_button.py b/tests/components/bmw_connected_drive/test_button.py index 99cabc900fa..88c7990cde9 100644 --- a/tests/components/bmw_connected_drive/test_button.py +++ b/tests/components/bmw_connected_drive/test_button.py @@ -165,7 +165,7 @@ async def test_service_call_success_state_change( ( "button.i4_edrive40_find_vehicle", "device_tracker.i4_edrive40", - {"latitude": 123.456, "longitude": 34.5678, "direction": 121}, + {"latitude": 12.345, "longitude": 34.5678, "direction": 121}, {"latitude": 48.177334, "longitude": 11.556274, "direction": 180}, ), ], diff --git a/tests/components/bmw_connected_drive/test_config_flow.py b/tests/components/bmw_connected_drive/test_config_flow.py index f346cd70b26..f71730fcc17 100644 --- a/tests/components/bmw_connected_drive/test_config_flow.py +++ b/tests/components/bmw_connected_drive/test_config_flow.py @@ -188,15 +188,7 @@ async def test_reauth(hass: HomeAssistant) -> None: assert config_entry.data == config_entry_with_wrong_password["data"] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - ) - + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {} diff --git a/tests/components/bosch_shc/test_config_flow.py b/tests/components/bosch_shc/test_config_flow.py index 2c43ec0a370..eaabe112807 100644 --- a/tests/components/bosch_shc/test_config_flow.py +++ b/tests/components/bosch_shc/test_config_flow.py @@ -646,11 +646,7 @@ async def test_reauth(hass: HomeAssistant) -> None: title="shc012345", ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/braviatv/test_config_flow.py b/tests/components/braviatv/test_config_flow.py index 6fc02dbd36f..7a4f93f7f16 100644 --- a/tests/components/braviatv/test_config_flow.py +++ b/tests/components/braviatv/test_config_flow.py @@ -17,7 +17,7 @@ from homeassistant.components.braviatv.const import ( DOMAIN, NICKNAME_PREFIX, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_HOST, CONF_MAC, CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -405,6 +405,9 @@ async def test_reauth_successful(hass: HomeAssistant, use_psk, new_pin) -> None: title="TV-Model", ) config_entry.add_to_hass(hass) + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "authorize" with ( patch("pybravia.BraviaClient.connect"), @@ -421,15 +424,6 @@ async def test_reauth_successful(hass: HomeAssistant, use_psk, new_pin) -> None: return_value={}, ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, - data=config_entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "authorize" - result = await 
hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_USE_PSK: use_psk} ) diff --git a/tests/components/bring/conftest.py b/tests/components/bring/conftest.py index 6c39c5020f9..60c13a1c208 100644 --- a/tests/components/bring/conftest.py +++ b/tests/components/bring/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from typing import cast from unittest.mock import AsyncMock, patch +import uuid from bring_api.types import BringAuthResponse import pytest @@ -10,7 +11,7 @@ import pytest from homeassistant.components.bring import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD -from tests.common import MockConfigEntry +from tests.common import MockConfigEntry, load_json_object_fixture EMAIL = "test-email" PASSWORD = "test-password" @@ -43,10 +44,23 @@ def mock_bring_client() -> Generator[AsyncMock]: client = mock_client.return_value client.uuid = UUID client.login.return_value = cast(BringAuthResponse, {"name": "Bring"}) - client.load_lists.return_value = {"lists": []} + client.load_lists.return_value = load_json_object_fixture("lists.json", DOMAIN) + client.get_list.return_value = load_json_object_fixture("items.json", DOMAIN) yield client +@pytest.fixture +def mock_uuid() -> Generator[AsyncMock]: + """Mock uuid.""" + + with patch( + "homeassistant.components.bring.todo.uuid.uuid4", + autospec=True, + ) as mock_client: + mock_client.return_value = uuid.UUID("b669ad23-606a-4652-b302-995d34b1cb1c") + yield mock_client + + @pytest.fixture(name="bring_config_entry") def mock_bring_config_entry() -> MockConfigEntry: """Mock bring configuration entry.""" diff --git a/tests/components/bring/fixtures/items.json b/tests/components/bring/fixtures/items.json new file mode 100644 index 00000000000..43e05a39fbb --- /dev/null +++ b/tests/components/bring/fixtures/items.json @@ -0,0 +1,26 @@ +{ + "uuid": "77a151f8-77c4-47a3-8295-c750a0e69d4f", + "status": "REGISTERED", + "purchase": [ + { + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "itemId": "Paprika", + "specification": "Rot", + "attributes": [] + }, + { + "uuid": "72d370ab-d8ca-4e41-b956-91df94795b4e", + "itemId": "Pouletbrüstli", + "specification": "Bio", + "attributes": [] + } + ], + "recently": [ + { + "uuid": "fc8db30a-647e-4e6c-9d71-3b85d6a2d954", + "itemId": "Ananas", + "specification": "", + "attributes": [] + } + ] +} diff --git a/tests/components/bring/fixtures/lists.json b/tests/components/bring/fixtures/lists.json new file mode 100644 index 00000000000..5891d94f7de --- /dev/null +++ b/tests/components/bring/fixtures/lists.json @@ -0,0 +1,14 @@ +{ + "lists": [ + { + "listUuid": "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + "name": "Einkauf", + "theme": "ch.publisheria.bring.theme.home" + }, + { + "listUuid": "b4776778-7f6c-496e-951b-92a35d3db0dd", + "name": "Baumarkt", + "theme": "ch.publisheria.bring.theme.home" + } + ] +} diff --git a/tests/components/bring/snapshots/test_todo.ambr b/tests/components/bring/snapshots/test_todo.ambr new file mode 100644 index 00000000000..6a24b4148b7 --- /dev/null +++ b/tests/components/bring/snapshots/test_todo.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_todo[todo.baumarkt-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.baumarkt', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + 
}), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Baumarkt', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': '00000000-00000000-00000000-00000000_b4776778-7f6c-496e-951b-92a35d3db0dd', + 'unit_of_measurement': None, + }) +# --- +# name: test_todo[todo.baumarkt-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Baumarkt', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.baumarkt', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_todo[todo.einkauf-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'todo', + 'entity_category': None, + 'entity_id': 'todo.einkauf', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Einkauf', + 'platform': 'bring', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'shopping_list', + 'unique_id': '00000000-00000000-00000000-00000000_e542eef6-dba7-4c31-a52c-29e6ab9d83a5', + 'unit_of_measurement': None, + }) +# --- +# name: test_todo[todo.einkauf-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Einkauf', + 'supported_features': , + }), + 'context': , + 'entity_id': 'todo.einkauf', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- diff --git a/tests/components/bring/test_config_flow.py b/tests/components/bring/test_config_flow.py index d307e0ccbbe..8d215a5d3ee 100644 --- a/tests/components/bring/test_config_flow.py +++ b/tests/components/bring/test_config_flow.py @@ -10,7 +10,7 @@ from bring_api.exceptions import ( import pytest from homeassistant.components.bring.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,15 +123,7 @@ async def test_flow_reauth( bring_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": bring_config_entry.entry_id, - "unique_id": bring_config_entry.unique_id, - }, - ) - + result = await bring_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -171,15 +163,7 @@ async def test_flow_reauth_error_and_recover( bring_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": bring_config_entry.entry_id, - "unique_id": bring_config_entry.unique_id, - }, - ) - + result = await bring_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/bring/test_init.py b/tests/components/bring/test_init.py index f1b1f78e775..613b65e38b6 100644 --- a/tests/components/bring/test_init.py +++ b/tests/components/bring/test_init.py @@ -28,9 +28,9 @@ async def setup_integration( await 
hass.async_block_till_done() +@pytest.mark.usefixtures("mock_bring_client") async def test_load_unload( hass: HomeAssistant, - mock_bring_client: AsyncMock, bring_config_entry: MockConfigEntry, ) -> None: """Test loading and unloading of the config entry.""" @@ -58,7 +58,7 @@ async def test_init_failure( mock_bring_client: AsyncMock, status: ConfigEntryState, exception: Exception, - bring_config_entry: MockConfigEntry | None, + bring_config_entry: MockConfigEntry, ) -> None: """Test an initialization error on integration load.""" mock_bring_client.login.side_effect = exception @@ -79,7 +79,7 @@ async def test_init_exceptions( mock_bring_client: AsyncMock, exception: Exception, expected: Exception, - bring_config_entry: MockConfigEntry | None, + bring_config_entry: MockConfigEntry, ) -> None: """Test an initialization error on integration load.""" bring_config_entry.add_to_hass(hass) @@ -87,3 +87,42 @@ async def test_init_exceptions( with pytest.raises(expected): await async_setup_entry(hass, bring_config_entry) + + +@pytest.mark.parametrize("exception", [BringRequestException, BringParseException]) +@pytest.mark.parametrize("bring_method", ["load_lists", "get_list"]) +async def test_config_entry_not_ready( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, + exception: Exception, + bring_method: str, +) -> None: + """Test config entry not ready.""" + getattr(mock_bring_client, bring_method).side_effect = exception + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.SETUP_RETRY + + +@pytest.mark.parametrize( + "exception", [None, BringAuthException, BringRequestException, BringParseException] +) +async def test_config_entry_not_ready_auth_error( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, + exception: Exception | None, +) -> None: + """Test config entry not ready from authentication error.""" + + mock_bring_client.load_lists.side_effect = BringAuthException + mock_bring_client.retrieve_new_access_token.side_effect = exception + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.SETUP_RETRY diff --git a/tests/components/bring/test_notification.py b/tests/components/bring/test_notification.py new file mode 100644 index 00000000000..b1fa28335ad --- /dev/null +++ b/tests/components/bring/test_notification.py @@ -0,0 +1,106 @@ +"""Test todo entity notification action of the Bring! 
integration.""" + +import re +from unittest.mock import AsyncMock + +from bring_api import BringNotificationType, BringRequestException +import pytest + +from homeassistant.components.bring.const import ( + ATTR_ITEM_NAME, + ATTR_NOTIFICATION_TYPE, + DOMAIN, + SERVICE_PUSH_NOTIFICATION, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from tests.common import MockConfigEntry + + +async def test_send_notification( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test send bring push notification.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + DOMAIN, + SERVICE_PUSH_NOTIFICATION, + service_data={ + ATTR_NOTIFICATION_TYPE: "GOING_SHOPPING", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.notify.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + BringNotificationType.GOING_SHOPPING, + None, + ) + + +async def test_send_notification_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test send bring push notification with exception.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + mock_bring_client.notify.side_effect = BringRequestException + with pytest.raises( + HomeAssistantError, + match="Failed to send push notification for bring due to a connection error, try again later", + ): + await hass.services.async_call( + DOMAIN, + SERVICE_PUSH_NOTIFICATION, + service_data={ + ATTR_NOTIFICATION_TYPE: "GOING_SHOPPING", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + +async def test_send_notification_service_validation_error( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test send bring push notification.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + mock_bring_client.notify.side_effect = ValueError + with pytest.raises( + HomeAssistantError, + match=re.escape( + "Failed to perform action bring.send_message. 'URGENT_MESSAGE' requires a value @ data['item']. Got None" + ), + ): + await hass.services.async_call( + DOMAIN, + SERVICE_PUSH_NOTIFICATION, + service_data={ATTR_NOTIFICATION_TYPE: "URGENT_MESSAGE", ATTR_ITEM_NAME: ""}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) diff --git a/tests/components/bring/test_todo.py b/tests/components/bring/test_todo.py new file mode 100644 index 00000000000..d67429e8f49 --- /dev/null +++ b/tests/components/bring/test_todo.py @@ -0,0 +1,302 @@ +"""Test for todo platform of the Bring! 
integration.""" + +import re +from unittest.mock import AsyncMock + +from bring_api import BringItemOperation, BringRequestException +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.todo import ( + ATTR_DESCRIPTION, + ATTR_ITEM, + ATTR_RENAME, + DOMAIN as TODO_DOMAIN, + TodoServices, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("mock_bring_client") +async def test_todo( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, +) -> None: + """Snapshot test states of todo platform.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await snapshot_platform( + hass, entity_registry, snapshot, bring_config_entry.entry_id + ) + + +@pytest.mark.usefixtures("mock_uuid") +async def test_add_item( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test add item to list.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + service_data={ATTR_ITEM: "Äpfel", ATTR_DESCRIPTION: "rot"}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.save_item.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + "Äpfel", + "rot", + "b669ad23-606a-4652-b302-995d34b1cb1c", + ) + + +async def test_add_item_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test add item to list with exception.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + mock_bring_client.save_item.side_effect = BringRequestException + with pytest.raises( + HomeAssistantError, match="Failed to save item Äpfel to Bring! 
list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.ADD_ITEM, + service_data={ATTR_ITEM: "Äpfel", ATTR_DESCRIPTION: "rot"}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_uuid") +async def test_update_item( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test update item.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", + ATTR_RENAME: "Paprika", + ATTR_DESCRIPTION: "Rot", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.batch_update_list.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + { + "itemId": "Paprika", + "spec": "Rot", + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + }, + BringItemOperation.ADD, + ) + + +async def test_update_item_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test update item with exception.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + mock_bring_client.batch_update_list.side_effect = BringRequestException + with pytest.raises( + HomeAssistantError, match="Failed to update item Paprika to Bring! list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", + ATTR_RENAME: "Paprika", + ATTR_DESCRIPTION: "Rot", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_uuid") +async def test_rename_item( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test rename item.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", + ATTR_RENAME: "Gurke", + ATTR_DESCRIPTION: "", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.batch_update_list.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + [ + { + "itemId": "Paprika", + "spec": "", + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "operation": BringItemOperation.REMOVE, + }, + { + "itemId": "Gurke", + "spec": "", + "uuid": "b669ad23-606a-4652-b302-995d34b1cb1c", + "operation": BringItemOperation.ADD, + }, + ], + ) + + +async def test_rename_item_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test rename item with exception.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + mock_bring_client.batch_update_list.side_effect = BringRequestException + with pytest.raises( + 
HomeAssistantError, match="Failed to rename item Gurke to Bring! list" + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.UPDATE_ITEM, + service_data={ + ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de", + ATTR_RENAME: "Gurke", + ATTR_DESCRIPTION: "", + }, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + +@pytest.mark.usefixtures("mock_uuid") +async def test_delete_items( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test delete item.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de"}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) + + mock_bring_client.batch_update_list.assert_called_once_with( + "e542eef6-dba7-4c31-a52c-29e6ab9d83a5", + [ + { + "itemId": "b5d0790b-5f32-4d5c-91da-e29066f167de", + "spec": "", + "uuid": "b5d0790b-5f32-4d5c-91da-e29066f167de", + }, + ], + BringItemOperation.REMOVE, + ) + + +async def test_delete_items_exception( + hass: HomeAssistant, + bring_config_entry: MockConfigEntry, + mock_bring_client: AsyncMock, +) -> None: + """Test delete item.""" + + bring_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(bring_config_entry.entry_id) + await hass.async_block_till_done() + + assert bring_config_entry.state is ConfigEntryState.LOADED + mock_bring_client.batch_update_list.side_effect = BringRequestException + with pytest.raises( + HomeAssistantError, + match=re.escape("Failed to delete 1 item(s) from Bring! 
list"), + ): + await hass.services.async_call( + TODO_DOMAIN, + TodoServices.REMOVE_ITEM, + service_data={ATTR_ITEM: "b5d0790b-5f32-4d5c-91da-e29066f167de"}, + target={ATTR_ENTITY_ID: "todo.einkauf"}, + blocking=True, + ) diff --git a/tests/components/broadlink/__init__.py b/tests/components/broadlink/__init__.py index 61ef27815fd..6185e9bdefc 100644 --- a/tests/components/broadlink/__init__.py +++ b/tests/components/broadlink/__init__.py @@ -4,6 +4,7 @@ from dataclasses import dataclass from unittest.mock import MagicMock, patch from homeassistant.components.broadlink.const import DOMAIN +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -115,20 +116,34 @@ class BroadlinkDevice: """Representation of a Broadlink device.""" def __init__( - self, name, host, mac, model, manufacturer, type_, devtype, fwversion, timeout - ): + self, + name: str, + host: str, + mac: str, + model: str, + manufacturer: str, + type_: str, + devtype: int, + fwversion: int, + timeout: int, + ) -> None: """Initialize the device.""" - self.name: str = name - self.host: str = host - self.mac: str = mac - self.model: str = model - self.manufacturer: str = manufacturer - self.type: str = type_ - self.devtype: int = devtype - self.timeout: int = timeout - self.fwversion: int = fwversion + self.name = name + self.host = host + self.mac = mac + self.model = model + self.manufacturer = manufacturer + self.type = type_ + self.devtype = devtype + self.timeout = timeout + self.fwversion = fwversion - async def setup_entry(self, hass, mock_api=None, mock_entry=None): + async def setup_entry( + self, + hass: HomeAssistant, + mock_api: MagicMock | None = None, + mock_entry: MockConfigEntry | None = None, + ) -> MockSetup: """Set up the device.""" mock_api = mock_api or self.get_mock_api() mock_entry = mock_entry or self.get_mock_entry() diff --git a/tests/components/broadlink/test_config_flow.py b/tests/components/broadlink/test_config_flow.py index 2def8c0b3b9..f31cb380631 100644 --- a/tests/components/broadlink/test_config_flow.py +++ b/tests/components/broadlink/test_config_flow.py @@ -734,13 +734,9 @@ async def test_flow_reauth_works(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() - data = {"name": device.name, **device.get_entry_data()} with patch(DEVICE_FACTORY, return_value=mock_api): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data - ) - + result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reset" @@ -770,12 +766,8 @@ async def test_flow_reauth_invalid_host(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() - data = {"name": device.name, **device.get_entry_data()} - with patch(DEVICE_FACTORY, return_value=mock_api): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data - ) + result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) device.mac = get_device("Office").mac mock_api = device.get_mock_api() @@ -804,12 +796,9 @@ async def test_flow_reauth_valid_host(hass: HomeAssistant) -> None: mock_entry.add_to_hass(hass) mock_api = device.get_mock_api() mock_api.auth.side_effect = blke.AuthenticationError() - data = {"name": device.name, 
**device.get_entry_data()} with patch(DEVICE_FACTORY, return_value=mock_api): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=data - ) + result = await mock_entry.start_reauth_flow(hass, data={"name": device.name}) device.host = "192.168.1.128" mock_api = device.get_mock_api() diff --git a/tests/components/brunt/test_config_flow.py b/tests/components/brunt/test_config_flow.py index 2796882a3c1..7a805a9ee52 100644 --- a/tests/components/brunt/test_config_flow.py +++ b/tests/components/brunt/test_config_flow.py @@ -110,15 +110,7 @@ async def test_reauth( unique_id="test-username", ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=None, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" with patch( diff --git a/tests/components/bryant_evolution/conftest.py b/tests/components/bryant_evolution/conftest.py index cc9dfbec1e1..fb12d7ebf29 100644 --- a/tests/components/bryant_evolution/conftest.py +++ b/tests/components/bryant_evolution/conftest.py @@ -15,7 +15,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.bryant_evolution.async_setup_entry", return_value=True @@ -31,7 +31,7 @@ for the Bryant Evolution integration. @pytest.fixture(autouse=True) -def mock_evolution_client_factory() -> Generator[AsyncMock, None, None]: +def mock_evolution_client_factory() -> Generator[AsyncMock]: """Mock an Evolution client.""" with patch( "evolutionhttp.BryantEvolutionLocalClient.get_client", diff --git a/tests/components/bryant_evolution/test_climate.py b/tests/components/bryant_evolution/test_climate.py index 42944c32bc2..0b527e02a10 100644 --- a/tests/components/bryant_evolution/test_climate.py +++ b/tests/components/bryant_evolution/test_climate.py @@ -52,7 +52,7 @@ async def test_setup_integration_success( async def test_set_temperature_mode_cool( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, ) -> None: """Test setting the temperature in cool mode.""" @@ -83,7 +83,7 @@ async def test_set_temperature_mode_cool( async def test_set_temperature_mode_heat( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, ) -> None: """Test setting the temperature in heat mode.""" @@ -111,7 +111,7 @@ async def test_set_temperature_mode_heat( async def test_set_temperature_mode_heat_cool( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, ) -> None: """Test setting the temperature in heat_cool mode.""" @@ -147,7 +147,7 @@ async def test_set_temperature_mode_heat_cool( async def test_set_fan_mode( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + 
mock_evolution_client_factory: Generator[AsyncMock], ) -> None: """Test that setting fan mode works.""" mock_client = await mock_evolution_client_factory(1, 1, "/dev/unused") @@ -175,7 +175,7 @@ async def test_set_fan_mode( async def test_set_hvac_mode( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], hvac_mode, evolution_mode, ) -> None: @@ -203,7 +203,7 @@ async def test_set_hvac_mode( async def test_read_hvac_action_heat_cool( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, curr_temp: int, expected_action: HVACAction, @@ -236,7 +236,7 @@ async def test_read_hvac_action_heat_cool( async def test_read_hvac_action( hass: HomeAssistant, mock_evolution_entry: MockConfigEntry, - mock_evolution_client_factory: Generator[AsyncMock, None, None], + mock_evolution_client_factory: Generator[AsyncMock], freezer: FrozenDateTimeFactory, mode: str, active: bool, diff --git a/tests/components/bsblan/conftest.py b/tests/components/bsblan/conftest.py index 862f3ae1d0c..13d4017d7c8 100644 --- a/tests/components/bsblan/conftest.py +++ b/tests/components/bsblan/conftest.py @@ -3,7 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch -from bsblan import Device, Info, State +from bsblan import Device, Info, State, StaticState import pytest from homeassistant.components.bsblan.const import CONF_PASSKEY, DOMAIN @@ -42,17 +42,21 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture def mock_bsblan() -> Generator[MagicMock]: """Return a mocked BSBLAN client.""" - with ( patch("homeassistant.components.bsblan.BSBLAN", autospec=True) as bsblan_mock, patch("homeassistant.components.bsblan.config_flow.BSBLAN", new=bsblan_mock), ): bsblan = bsblan_mock.return_value - bsblan.info.return_value = Info.parse_raw(load_fixture("info.json", DOMAIN)) - bsblan.device.return_value = Device.parse_raw( + bsblan.info.return_value = Info.from_json(load_fixture("info.json", DOMAIN)) + bsblan.device.return_value = Device.from_json( load_fixture("device.json", DOMAIN) ) - bsblan.state.return_value = State.parse_raw(load_fixture("state.json", DOMAIN)) + bsblan.state.return_value = State.from_json(load_fixture("state.json", DOMAIN)) + + bsblan.static_values.return_value = StaticState.from_json( + load_fixture("static.json", DOMAIN) + ) + yield bsblan diff --git a/tests/components/bsblan/fixtures/static.json b/tests/components/bsblan/fixtures/static.json new file mode 100644 index 00000000000..8c7abc3397b --- /dev/null +++ b/tests/components/bsblan/fixtures/static.json @@ -0,0 +1,20 @@ +{ + "min_temp": { + "name": "Room temp frost protection setpoint", + "error": 0, + "value": "8.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + }, + "max_temp": { + "name": "Summer/winter changeover temp heat circuit 1", + "error": 0, + "value": "20.0", + "desc": "", + "dataType": 0, + "readonly": 0, + "unit": "°C" + } +} diff --git a/tests/components/bsblan/test_diagnostics.py b/tests/components/bsblan/test_diagnostics.py index 316296df78a..8939456c2ac 100644 --- a/tests/components/bsblan/test_diagnostics.py +++ b/tests/components/bsblan/test_diagnostics.py @@ -16,8 +16,7 @@ async def test_diagnostics( snapshot: SnapshotAssertion, ) -> None: """Test diagnostics.""" - - assert ( - 
await get_diagnostics_for_config_entry(hass, hass_client, init_integration) - == snapshot + diagnostics_data = await get_diagnostics_for_config_entry( + hass, hass_client, init_integration ) + assert diagnostics_data == snapshot diff --git a/tests/components/bthome/test_config_flow.py b/tests/components/bthome/test_config_flow.py index acf490d341e..faf2f1c9ef5 100644 --- a/tests/components/bthome/test_config_flow.py +++ b/tests/components/bthome/test_config_flow.py @@ -563,16 +563,7 @@ async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: device = DeviceData() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": {"name": entry.title}, - "unique_id": entry.unique_id, - }, - data=entry.data | {"device": device}, - ) + result = await entry.start_reauth_flow(hass, data={"device": device}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" diff --git a/tests/components/buienradar/test_camera.py b/tests/components/buienradar/test_camera.py index 9ef986b094c..f1518a1a0ea 100644 --- a/tests/components/buienradar/test_camera.py +++ b/tests/components/buienradar/test_camera.py @@ -8,6 +8,7 @@ from http import HTTPStatus from aiohttp.client_exceptions import ClientResponseError from homeassistant.components.buienradar.const import CONF_DELTA, DOMAIN +from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_COUNTRY_CODE, CONF_LATITUDE, CONF_LONGITUDE from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -31,7 +32,7 @@ def radar_map_url(country_code: str = "NL") -> str: return f"https://api.buienradar.nl/image/1.0/RadarMap{country_code}?w=700&h=700" -async def _setup_config_entry(hass, entry): +async def _setup_config_entry(hass: HomeAssistant, entry: ConfigEntry) -> None: entity_registry = er.async_get(hass) entity_registry.async_get_or_create( domain="camera", diff --git a/tests/components/caldav/test_config_flow.py b/tests/components/caldav/test_config_flow.py index 0079e59a931..bf22fb0bd9c 100644 --- a/tests/components/caldav/test_config_flow.py +++ b/tests/components/caldav/test_config_flow.py @@ -106,13 +106,7 @@ async def test_reauth_success( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -147,13 +141,7 @@ async def test_reauth_failure( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/calendar/snapshots/test_init.ambr b/tests/components/calendar/snapshots/test_init.ambr index fe23c5dbac9..1b2bb9f0196 100644 --- a/tests/components/calendar/snapshots/test_init.ambr +++ b/tests/components/calendar/snapshots/test_init.ambr @@ -7,12 +7,6 @@ }), }) # --- -# name: test_list_events_service_duration[frozen_time-calendar.calendar_1-00:15:00-list_events] - dict({ - 'events': list([ - ]), - }) -# --- # name: 
test_list_events_service_duration[frozen_time-calendar.calendar_1-01:00:00-get_events] dict({ 'calendar.calendar_1': dict({ @@ -28,19 +22,6 @@ }), }) # --- -# name: test_list_events_service_duration[frozen_time-calendar.calendar_1-01:00:00-list_events] - dict({ - 'events': list([ - dict({ - 'description': 'Future Description', - 'end': '2023-10-19T09:20:05-06:00', - 'location': 'Future Location', - 'start': '2023-10-19T08:20:05-06:00', - 'summary': 'Future Event', - }), - ]), - }) -# --- # name: test_list_events_service_duration[frozen_time-calendar.calendar_2-00:15:00-get_events] dict({ 'calendar.calendar_2': dict({ @@ -54,14 +35,3 @@ }), }) # --- -# name: test_list_events_service_duration[frozen_time-calendar.calendar_2-00:15:00-list_events] - dict({ - 'events': list([ - dict({ - 'end': '2023-10-19T08:20:05-06:00', - 'start': '2023-10-19T07:20:05-06:00', - 'summary': 'Current Event', - }), - ]), - }) -# --- diff --git a/tests/components/cast/test_media_player.py b/tests/components/cast/test_media_player.py index 1d99adb4723..513f32b1ad6 100644 --- a/tests/components/cast/test_media_player.py +++ b/tests/components/cast/test_media_player.py @@ -3,7 +3,9 @@ from __future__ import annotations import asyncio +from collections.abc import Callable import json +from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, Mock, patch from uuid import UUID @@ -112,7 +114,9 @@ def get_fake_zconf(host="192.168.178.42", port=8009): return zconf -async def async_setup_cast(hass, config=None): +async def async_setup_cast( + hass: HomeAssistant, config: dict[str, Any] | None = None +) -> MagicMock: """Set up the cast platform.""" if config is None: config = {} @@ -128,7 +132,20 @@ async def async_setup_cast(hass, config=None): return add_entities -async def async_setup_cast_internal_discovery(hass, config=None): +async def async_setup_cast_internal_discovery( + hass: HomeAssistant, config: dict[str, Any] | None = None +) -> tuple[ + Callable[ + [ + pychromecast.discovery.HostServiceInfo + | pychromecast.discovery.MDNSServiceInfo, + ChromecastInfo, + ], + None, + ], + Callable[[str, ChromecastInfo], None], + MagicMock, +]: """Set up the cast platform and the discovery.""" browser = MagicMock(devices={}, zc={}) diff --git a/tests/components/chacon_dio/conftest.py b/tests/components/chacon_dio/conftest.py index 3c3b970cec0..186bc468bee 100644 --- a/tests/components/chacon_dio/conftest.py +++ b/tests/components/chacon_dio/conftest.py @@ -65,6 +65,8 @@ def mock_dio_chacon_client() -> Generator[AsyncMock]: client.get_user_id.return_value = "dummy-user-id" client.search_all_devices.return_value = MOCK_COVER_DEVICE + client.switch_switch.return_value = {} + client.move_shutter_direction.return_value = {} client.disconnect.return_value = {} diff --git a/tests/components/chacon_dio/snapshots/test_switch.ambr b/tests/components/chacon_dio/snapshots/test_switch.ambr new file mode 100644 index 00000000000..7a65dad5445 --- /dev/null +++ b/tests/components/chacon_dio/snapshots/test_switch.ambr @@ -0,0 +1,48 @@ +# serializer version: 1 +# name: test_entities[switch.switch_mock_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.switch_mock_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': ,
+    'original_icon': None,
+    'original_name': None,
+    'platform': 'chacon_dio',
+    'previous_unique_id': None,
+    'supported_features': 0,
+    'translation_key': None,
+    'unique_id': 'L4HActuator_idmock1',
+    'unit_of_measurement': None,
+  })
+# ---
+# name: test_entities[switch.switch_mock_1-state]
+  StateSnapshot({
+    'attributes': ReadOnlyDict({
+      'device_class': 'switch',
+      'friendly_name': 'Switch mock 1',
+    }),
+    'context': ,
+    'entity_id': 'switch.switch_mock_1',
+    'last_changed': ,
+    'last_reported': ,
+    'last_updated': ,
+    'state': 'on',
+  })
+# ---
diff --git a/tests/components/chacon_dio/test_cover.py b/tests/components/chacon_dio/test_cover.py
index be606e67e1e..24e6e8581d8 100644
--- a/tests/components/chacon_dio/test_cover.py
+++ b/tests/components/chacon_dio/test_cover.py
@@ -17,9 +17,11 @@ from homeassistant.components.cover import (
     STATE_OPEN,
     STATE_OPENING,
 )
+from homeassistant.components.homeassistant import SERVICE_UPDATE_ENTITY
 from homeassistant.const import ATTR_ENTITY_ID
-from homeassistant.core import HomeAssistant
+from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant
 from homeassistant.helpers import entity_registry as er
+from homeassistant.setup import async_setup_component
 
 from . import setup_integration
 
@@ -42,6 +44,38 @@ async def test_entities(
     await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
 
 
+async def test_update(
+    hass: HomeAssistant,
+    mock_dio_chacon_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test the update of the Chacon Dio covers."""
+
+    await setup_integration(hass, mock_config_entry)
+
+    mock_dio_chacon_client.get_status_details.return_value = {
+        "L4HActuator_idmock1": {
+            "id": "L4HActuator_idmock1",
+            "connected": True,
+            "openlevel": 51,
+            "movement": "stop",
+        }
+    }
+
+    await async_setup_component(hass, HOMEASSISTANT_DOMAIN, {})
+    await hass.services.async_call(
+        HOMEASSISTANT_DOMAIN,
+        SERVICE_UPDATE_ENTITY,
+        {ATTR_ENTITY_ID: COVER_ENTITY_ID},
+        blocking=True,
+    )
+
+    state = hass.states.get(COVER_ENTITY_ID)
+    assert state
+    assert state.attributes.get(ATTR_CURRENT_POSITION) == 51
+    assert state.state == STATE_OPEN
+
+
 async def test_cover_actions(
     hass: HomeAssistant,
     mock_dio_chacon_client: AsyncMock,
@@ -100,7 +134,7 @@ async def test_cover_callbacks(
     mock_config_entry: MockConfigEntry,
     entity_registry: er.EntityRegistry,
 ) -> None:
-    """Test the creation and values of the Chacon Dio covers."""
+    """Test the callbacks on the Chacon Dio covers."""
 
     await setup_integration(hass, mock_config_entry)
 
diff --git a/tests/components/chacon_dio/test_switch.py b/tests/components/chacon_dio/test_switch.py
new file mode 100644
index 00000000000..a5ad0d0ea13
--- /dev/null
+++ b/tests/components/chacon_dio/test_switch.py
@@ -0,0 +1,132 @@
+"""Test the Chacon Dio switch."""
+
+from collections.abc import Callable
+from unittest.mock import AsyncMock
+
+from syrupy.assertion import SnapshotAssertion
+
+from homeassistant.components.switch import (
+    DOMAIN as SWITCH_DOMAIN,
+    SERVICE_TURN_OFF,
+    SERVICE_TURN_ON,
+)
+from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from . import setup_integration
+
+from tests.common import MockConfigEntry, snapshot_platform
+
+SWITCH_ENTITY_ID = "switch.switch_mock_1"
+
+MOCK_SWITCH_DEVICE = {
+    "L4HActuator_idmock1": {
+        "id": "L4HActuator_idmock1",
+        "name": "Switch mock 1",
+        "type": "SWITCH_LIGHT",
+        "model": "CERNwd-3B_1.0.6",
+        "connected": True,
+        "is_on": True,
+    }
+}
+
+
+async def test_entities(
+    hass: HomeAssistant,
+    mock_dio_chacon_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    entity_registry: er.EntityRegistry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test the creation and values of the Chacon Dio switches."""
+
+    mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE
+
+    await setup_integration(hass, mock_config_entry)
+
+    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
+
+
+async def test_switch_actions(
+    hass: HomeAssistant,
+    mock_dio_chacon_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    entity_registry: er.EntityRegistry,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Test the actions on the Chacon Dio switch."""
+
+    mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE
+
+    await setup_integration(hass, mock_config_entry)
+
+    await hass.services.async_call(
+        SWITCH_DOMAIN,
+        SERVICE_TURN_ON,
+        {ATTR_ENTITY_ID: SWITCH_ENTITY_ID},
+        blocking=True,
+    )
+    state = hass.states.get(SWITCH_ENTITY_ID)
+    assert state.state == STATE_ON
+
+    await hass.services.async_call(
+        SWITCH_DOMAIN,
+        SERVICE_TURN_OFF,
+        {ATTR_ENTITY_ID: SWITCH_ENTITY_ID},
+        blocking=True,
+    )
+    state = hass.states.get(SWITCH_ENTITY_ID)
+    # Turning off does not directly change the state; the change is applied by a server-side callback.
+    assert state.state == STATE_ON
+
+
+async def test_switch_callbacks(
+    hass: HomeAssistant,
+    mock_dio_chacon_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Test the callbacks on the Chacon Dio switches."""
+
+    mock_dio_chacon_client.search_all_devices.return_value = MOCK_SWITCH_DEVICE
+
+    await setup_integration(hass, mock_config_entry)
+
+    # Server-side callback tests
+    # We find the callback method on the mock client
+    callback_device_state_function: Callable = (
+        mock_dio_chacon_client.set_callback_device_state_by_device.call_args[0][1]
+    )
+
+    # Define a method to simply call it
+    async def _callback_device_state_function(is_on: bool) -> None:
+        callback_device_state_function(
+            {
+                "id": "L4HActuator_idmock1",
+                "connected": True,
+                "is_on": is_on,
+            }
+        )
+        await hass.async_block_till_done()
+
+    # And call it to effectively launch the callback as the server would do
+    await _callback_device_state_function(False)
+    state = hass.states.get(SWITCH_ENTITY_ID)
+    assert state
+    assert state.state == STATE_OFF
+
+
+async def test_no_switch_found(
+    hass: HomeAssistant,
+    mock_dio_chacon_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Test that no switch entity is created when no switch device is found."""
+
+    mock_dio_chacon_client.search_all_devices.return_value = None
+
+    await setup_integration(hass, mock_config_entry)
+
+    assert not hass.states.async_entity_ids(SWITCH_DOMAIN)
diff --git a/tests/components/clicksend_tts/test_notify.py b/tests/components/clicksend_tts/test_notify.py
index e73f0576d9e..892d7541354 100644
--- a/tests/components/clicksend_tts/test_notify.py
+++ b/tests/components/clicksend_tts/test_notify.py
@@ -46,7 +46,7 @@ def mock_clicksend_tts_notify():
     yield ns
 
 
-async def setup_notify(hass):
+async def 
setup_notify(hass: HomeAssistant) -> None: """Test setup.""" with assert_setup_component(1, notify.DOMAIN) as config: assert await async_setup_component(hass, notify.DOMAIN, CONFIG) diff --git a/tests/components/climate/common.py b/tests/components/climate/common.py index c890d3a7bb5..d6aedd23671 100644 --- a/tests/components/climate/common.py +++ b/tests/components/climate/common.py @@ -23,6 +23,7 @@ from homeassistant.components.climate import ( SERVICE_SET_SWING_MODE, SERVICE_SET_TEMPERATURE, ) +from homeassistant.components.climate.const import HVACMode from homeassistant.const import ( ATTR_ENTITY_ID, ATTR_TEMPERATURE, @@ -30,10 +31,13 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass -async def async_set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_preset_mode( + hass: HomeAssistant, preset_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new preset mode.""" data = {ATTR_PRESET_MODE: preset_mode} @@ -44,7 +48,9 @@ async def async_set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL): +def set_preset_mode( + hass: HomeAssistant, preset_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new preset mode.""" data = {ATTR_PRESET_MODE: preset_mode} @@ -54,7 +60,9 @@ def set_preset_mode(hass, preset_mode, entity_id=ENTITY_MATCH_ALL): hass.services.call(DOMAIN, SERVICE_SET_PRESET_MODE, data) -async def async_set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): +async def async_set_aux_heat( + hass: HomeAssistant, aux_heat: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified climate devices auxiliary heater on.""" data = {ATTR_AUX_HEAT: aux_heat} @@ -65,7 +73,9 @@ async def async_set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): +def set_aux_heat( + hass: HomeAssistant, aux_heat: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified climate devices auxiliary heater on.""" data = {ATTR_AUX_HEAT: aux_heat} @@ -76,13 +86,13 @@ def set_aux_heat(hass, aux_heat, entity_id=ENTITY_MATCH_ALL): async def async_set_temperature( - hass, - temperature=None, - entity_id=ENTITY_MATCH_ALL, - target_temp_high=None, - target_temp_low=None, - hvac_mode=None, -): + hass: HomeAssistant, + temperature: float | None = None, + entity_id: str = ENTITY_MATCH_ALL, + target_temp_high: float | None = None, + target_temp_low: float | None = None, + hvac_mode: HVACMode | None = None, +) -> None: """Set new target temperature.""" kwargs = { key: value @@ -103,13 +113,13 @@ async def async_set_temperature( @bind_hass def set_temperature( - hass, - temperature=None, - entity_id=ENTITY_MATCH_ALL, - target_temp_high=None, - target_temp_low=None, - hvac_mode=None, -): + hass: HomeAssistant, + temperature: float | None = None, + entity_id: str = ENTITY_MATCH_ALL, + target_temp_high: float | None = None, + target_temp_low: float | None = None, + hvac_mode: HVACMode | None = None, +) -> None: """Set new target temperature.""" kwargs = { key: value @@ -126,7 +136,9 @@ def set_temperature( hass.services.call(DOMAIN, SERVICE_SET_TEMPERATURE, kwargs) -async def async_set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): +async def async_set_humidity( + hass: HomeAssistant, humidity: int, entity_id: str = ENTITY_MATCH_ALL +) -> None: 
"""Set new target humidity.""" data = {ATTR_HUMIDITY: humidity} @@ -137,7 +149,9 @@ async def async_set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): +def set_humidity( + hass: HomeAssistant, humidity: int, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target humidity.""" data = {ATTR_HUMIDITY: humidity} @@ -147,7 +161,9 @@ def set_humidity(hass, humidity, entity_id=ENTITY_MATCH_ALL): hass.services.call(DOMAIN, SERVICE_SET_HUMIDITY, data) -async def async_set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): +async def async_set_fan_mode( + hass: HomeAssistant, fan: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set all or specified climate devices fan mode on.""" data = {ATTR_FAN_MODE: fan} @@ -158,7 +174,9 @@ async def async_set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): +def set_fan_mode( + hass: HomeAssistant, fan: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set all or specified climate devices fan mode on.""" data = {ATTR_FAN_MODE: fan} @@ -168,7 +186,9 @@ def set_fan_mode(hass, fan, entity_id=ENTITY_MATCH_ALL): hass.services.call(DOMAIN, SERVICE_SET_FAN_MODE, data) -async def async_set_hvac_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_hvac_mode( + hass: HomeAssistant, hvac_mode: HVACMode, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target operation mode.""" data = {ATTR_HVAC_MODE: hvac_mode} @@ -179,7 +199,9 @@ async def async_set_hvac_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_operation_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): +def set_operation_mode( + hass: HomeAssistant, hvac_mode: HVACMode, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target operation mode.""" data = {ATTR_HVAC_MODE: hvac_mode} @@ -189,7 +211,9 @@ def set_operation_mode(hass, hvac_mode, entity_id=ENTITY_MATCH_ALL): hass.services.call(DOMAIN, SERVICE_SET_HVAC_MODE, data) -async def async_set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_swing_mode( + hass: HomeAssistant, swing_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target swing mode.""" data = {ATTR_SWING_MODE: swing_mode} @@ -200,7 +224,9 @@ async def async_set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): +def set_swing_mode( + hass: HomeAssistant, swing_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target swing mode.""" data = {ATTR_SWING_MODE: swing_mode} @@ -210,7 +236,7 @@ def set_swing_mode(hass, swing_mode, entity_id=ENTITY_MATCH_ALL): hass.services.call(DOMAIN, SERVICE_SET_SWING_MODE, data) -async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn on device.""" data = {} @@ -220,7 +246,9 @@ async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_off( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn off device.""" data = {} diff --git a/tests/components/climate/conftest.py b/tests/components/climate/conftest.py index fd4368c4219..4ade8606e77 100644 --- a/tests/components/climate/conftest.py +++ 
b/tests/components/climate/conftest.py
@@ -4,10 +4,18 @@ from collections.abc import Generator
 
 import pytest
 
-from homeassistant.config_entries import ConfigFlow
+from homeassistant.components.climate import DOMAIN as CLIMATE_DOMAIN
+from homeassistant.config_entries import ConfigEntry, ConfigFlow
+from homeassistant.const import Platform
 from homeassistant.core import HomeAssistant
 
-from tests.common import mock_config_flow, mock_platform
+from tests.common import (
+    MockConfigEntry,
+    MockModule,
+    mock_config_flow,
+    mock_integration,
+    mock_platform,
+)
 
 
 class MockFlow(ConfigFlow):
@@ -21,3 +29,41 @@ def config_flow_fixture(hass: HomeAssistant) -> Generator[None]:
 
     with mock_config_flow("test", MockFlow):
         yield
+
+
+@pytest.fixture
+def register_test_integration(
+    hass: HomeAssistant, config_flow_fixture: None
+) -> Generator:
+    """Provide a mocked integration for tests."""
+
+    config_entry = MockConfigEntry(domain="test")
+    config_entry.add_to_hass(hass)
+
+    async def help_async_setup_entry_init(
+        hass: HomeAssistant, config_entry: ConfigEntry
+    ) -> bool:
+        """Set up test config entry."""
+        await hass.config_entries.async_forward_entry_setups(
+            config_entry, [CLIMATE_DOMAIN]
+        )
+        return True
+
+    async def help_async_unload_entry(
+        hass: HomeAssistant, config_entry: ConfigEntry
+    ) -> bool:
+        """Unload test config entry."""
+        return await hass.config_entries.async_unload_platforms(
+            config_entry, [Platform.CLIMATE]
+        )
+
+    mock_integration(
+        hass,
+        MockModule(
+            "test",
+            async_setup_entry=help_async_setup_entry_init,
+            async_unload_entry=help_async_unload_entry,
+        ),
+    )
+
+    return config_entry
diff --git a/tests/components/climate/test_init.py b/tests/components/climate/test_init.py
index f306551e540..256ecf92b1d 100644
--- a/tests/components/climate/test_init.py
+++ b/tests/components/climate/test_init.py
@@ -56,6 +56,7 @@ from tests.common import (
     import_and_test_deprecated_constant_enum,
     mock_integration,
     mock_platform,
+    setup_test_component_platform,
 )
 
 
@@ -237,42 +238,15 @@ def test_deprecated_current_constants(
 
 
 async def test_preset_mode_validation(
-    hass: HomeAssistant, config_flow_fixture: None
+    hass: HomeAssistant, register_test_integration: MockConfigEntry
 ) -> None:
     """Test mode validation for fan, swing and preset."""
+    climate_entity = MockClimateEntity(name="test", entity_id="climate.test")
 
-    async def async_setup_entry_init(
-        hass: HomeAssistant, config_entry: ConfigEntry
-    ) -> bool:
-        """Set up test config entry."""
-        await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN])
-        return True
-
-    async def async_setup_entry_climate_platform(
-        hass: HomeAssistant,
-        config_entry: ConfigEntry,
-        async_add_entities: AddEntitiesCallback,
-    ) -> None:
-        """Set up test climate platform via config entry."""
-        async_add_entities([MockClimateEntity(name="test", entity_id="climate.test")])
-
-    mock_integration(
-        hass,
-        MockModule(
-            "test",
-            async_setup_entry=async_setup_entry_init,
-        ),
-        built_in=False,
+    setup_test_component_platform(
+        hass, DOMAIN, entities=[climate_entity], from_config_entry=True
     )
-    mock_platform(
-        hass,
-        "test.climate",
-        MockPlatform(async_setup_entry=async_setup_entry_climate_platform),
-    )
-
-    config_entry = MockConfigEntry(domain="test")
-    config_entry.add_to_hass(hass)
-    assert await hass.config_entries.async_setup(config_entry.entry_id)
+    await hass.config_entries.async_setup(register_test_integration.entry_id)
     await hass.async_block_till_done()
 
     state = hass.states.get("climate.test")
@@ -402,7 +376,9 @@ def 
test_deprecated_supported_features_ints( async def test_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test adding feature flag and warn if missing when methods are set.""" @@ -419,43 +395,15 @@ async def test_warning_not_implemented_turn_on_off_feature( """Turn off.""" called.append("turn_off") - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -499,7 +447,9 @@ async def test_warning_not_implemented_turn_on_off_feature( async def test_implicit_warning_not_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test adding feature flag and warn if missing when methods are not set. 
@@ -527,43 +477,15 @@ async def test_implicit_warning_not_implemented_turn_on_off_feature( """ return [HVACMode.OFF, HVACMode.HEAT] - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -579,7 +501,9 @@ async def test_implicit_warning_not_implemented_turn_on_off_feature( async def test_no_warning_implemented_turn_on_off_feature( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test no warning when feature flags are set.""" @@ -594,43 +518,15 @@ async def test_no_warning_implemented_turn_on_off_feature( | ClimateEntityFeature.TURN_ON ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -651,7 +547,9 @@ async def test_no_warning_implemented_turn_on_off_feature( async def test_no_warning_integration_has_migrated( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, 
config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test no warning when integration migrated using `_enable_turn_on_off_backwards_compatibility`.""" @@ -665,43 +563,15 @@ async def test_no_warning_integration_has_migrated( | ClimateEntityFeature.SWING_MODE ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") @@ -722,7 +592,9 @@ async def test_no_warning_integration_has_migrated( async def test_no_warning_integration_implement_feature_flags( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, config_flow_fixture: None + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + register_test_integration: MockConfigEntry, ) -> None: """Test no warning when integration uses the correct feature flags.""" @@ -737,43 +609,15 @@ async def test_no_warning_integration_implement_feature_flags( | ClimateEntityFeature.TURN_ON ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTest(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) + climate_entity = MockClimateEntityTest(name="test", entity_id="climate.test") with patch.object( MockClimateEntityTest, "__module__", "tests.custom_components.climate.test_init" ): - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() 
state = hass.states.get("climate.test") @@ -1022,7 +866,7 @@ async def test_issue_aux_property_deprecated( async def test_no_issue_aux_property_deprecated_for_core( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - config_flow_fixture: None, + register_test_integration: MockConfigEntry, manifest_extra: dict[str, str], translation_key: str, translation_placeholders_extra: dict[str, str], @@ -1061,39 +905,10 @@ async def test_no_issue_aux_property_deprecated_for_core( entity_id="climate.testing", ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test weather platform via config entry.""" - async_add_entities([climate_entity]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - partial_manifest=manifest_extra, - ), - built_in=False, + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) - - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() assert climate_entity.state == HVACMode.HEAT @@ -1111,7 +926,7 @@ async def test_no_issue_aux_property_deprecated_for_core( async def test_no_issue_no_aux_property( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - config_flow_fixture: None, + register_test_integration: MockConfigEntry, issue_registry: ir.IssueRegistry, ) -> None: """Test the issue is raised on deprecated auxiliary heater attributes.""" @@ -1121,38 +936,10 @@ async def test_no_issue_no_aux_property( entity_id="climate.testing", ) - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test weather platform via config entry.""" - async_add_entities([climate_entity]) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, + setup_test_component_platform( + hass, DOMAIN, entities=[climate_entity], from_config_entry=True ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), - ) - - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() assert climate_entity.state == HVACMode.HEAT @@ -1167,7 +954,7 @@ async def test_no_issue_no_aux_property( async def test_temperature_validation( - hass: HomeAssistant, config_flow_fixture: None + hass: HomeAssistant, register_test_integration: MockConfigEntry ) -> None: """Test validation for temperatures.""" @@ -1194,40 +981,15 @@ 
async def test_temperature_validation( self._attr_target_temperature_high = kwargs[ATTR_TARGET_TEMP_HIGH] self._attr_target_temperature_low = kwargs[ATTR_TARGET_TEMP_LOW] - async def async_setup_entry_init( - hass: HomeAssistant, config_entry: ConfigEntry - ) -> bool: - """Set up test config entry.""" - await hass.config_entries.async_forward_entry_setups(config_entry, [DOMAIN]) - return True - - async def async_setup_entry_climate_platform( - hass: HomeAssistant, - config_entry: ConfigEntry, - async_add_entities: AddEntitiesCallback, - ) -> None: - """Set up test climate platform via config entry.""" - async_add_entities( - [MockClimateEntityTemp(name="test", entity_id="climate.test")] - ) - - mock_integration( - hass, - MockModule( - "test", - async_setup_entry=async_setup_entry_init, - ), - built_in=False, - ) - mock_platform( - hass, - "test.climate", - MockPlatform(async_setup_entry=async_setup_entry_climate_platform), + test_climate = MockClimateEntityTemp( + name="Test", + unique_id="unique_climate_test", ) - config_entry = MockConfigEntry(domain="test") - config_entry.add_to_hass(hass) - assert await hass.config_entries.async_setup(config_entry.entry_id) + setup_test_component_platform( + hass, DOMAIN, entities=[test_climate], from_config_entry=True + ) + await hass.config_entries.async_setup(register_test_integration.entry_id) await hass.async_block_till_done() state = hass.states.get("climate.test") diff --git a/tests/components/cloud/conftest.py b/tests/components/cloud/conftest.py index 3a5d333f9b8..2edd9571bdd 100644 --- a/tests/components/cloud/conftest.py +++ b/tests/components/cloud/conftest.py @@ -187,9 +187,8 @@ def set_cloud_prefs_fixture( @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture(autouse=True) diff --git a/tests/components/cloud/test_client.py b/tests/components/cloud/test_client.py index 005efd990fb..7af163cc49d 100644 --- a/tests/components/cloud/test_client.py +++ b/tests/components/cloud/test_client.py @@ -208,7 +208,9 @@ async def test_webhook_msg( received = [] - async def handler(hass, webhook_id, request): + async def handler( + hass: HomeAssistant, webhook_id: str, request: web.Request + ) -> web.Response: """Handle a webhook.""" received.append(request) return web.json_response({"from": "handler"}) diff --git a/tests/components/cloudflare/test_config_flow.py b/tests/components/cloudflare/test_config_flow.py index 1278113c0c7..f34a423833c 100644 --- a/tests/components/cloudflare/test_config_flow.py +++ b/tests/components/cloudflare/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock import pycfdns from homeassistant.components.cloudflare.const import CONF_RECORDS, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_TOKEN, CONF_SOURCE, CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -151,15 +151,7 @@ async def test_reauth_flow(hass: HomeAssistant, cfupdate_flow: MagicMock) -> Non entry = MockConfigEntry(domain=DOMAIN, data=ENTRY_CONFIG) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + 
result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/coinbase/common.py b/tests/components/coinbase/common.py index 2768b6a2cd4..0a2475ac218 100644 --- a/tests/components/coinbase/common.py +++ b/tests/components/coinbase/common.py @@ -6,6 +6,7 @@ from homeassistant.components.coinbase.const import ( DOMAIN, ) from homeassistant.const import CONF_API_KEY, CONF_API_TOKEN, CONF_API_VERSION +from homeassistant.core import HomeAssistant from .const import ( GOOD_CURRENCY_2, @@ -21,7 +22,7 @@ from tests.common import MockConfigEntry class MockPagination: """Mock pagination result.""" - def __init__(self, value=None): + def __init__(self, value=None) -> None: """Load simple pagination for tests.""" self.next_starting_after = value @@ -29,7 +30,7 @@ class MockPagination: class MockGetAccounts: """Mock accounts with pagination.""" - def __init__(self, starting_after=0): + def __init__(self, starting_after=0) -> None: """Init mocked object, forced to return two at a time.""" if (target_end := starting_after + 2) >= ( max_end := len(MOCK_ACCOUNTS_RESPONSE) @@ -58,7 +59,7 @@ def mocked_get_accounts(_, **kwargs): class MockGetAccountsV3: """Mock accounts with pagination.""" - def __init__(self, cursor=""): + def __init__(self, cursor="") -> None: """Init mocked object, forced to return two at a time.""" ids = [account["uuid"] for account in MOCK_ACCOUNTS_RESPONSE_V3] start = ids.index(cursor) if cursor else 0 @@ -115,7 +116,11 @@ def mock_get_portfolios(): } -async def init_mock_coinbase(hass, currencies=None, rates=None): +async def init_mock_coinbase( + hass: HomeAssistant, + currencies: list[str] | None = None, + rates: list[str] | None = None, +) -> MockConfigEntry: """Init Coinbase integration for testing.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -136,7 +141,11 @@ async def init_mock_coinbase(hass, currencies=None, rates=None): return config_entry -async def init_mock_coinbase_v3(hass, currencies=None, rates=None): +async def init_mock_coinbase_v3( + hass: HomeAssistant, + currencies: list[str] | None = None, + rates: list[str] | None = None, +) -> MockConfigEntry: """Init Coinbase integration for testing.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/comelit/test_config_flow.py b/tests/components/comelit/test_config_flow.py index 333bf09bd20..eeaea0e41e9 100644 --- a/tests/components/comelit/test_config_flow.py +++ b/tests/components/comelit/test_config_flow.py @@ -7,7 +7,7 @@ from aiocomelit import CannotAuthenticate, CannotConnect import pytest from homeassistant.components.comelit.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PIN, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -100,6 +100,9 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -113,15 +116,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: ): mock_request_get.return_value.status_code = 200 - result = await hass.config_entries.flow.async_init( - DOMAIN, - 
context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -147,6 +141,9 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_BRIDGE_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch("aiocomelit.api.ComeliteSerialBridgeApi.login", side_effect=side_effect), @@ -155,15 +152,6 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> ), patch("homeassistant.components.comelit.async_setup_entry"), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ diff --git a/tests/components/command_line/test_binary_sensor.py b/tests/components/command_line/test_binary_sensor.py index fd726ab77a4..5d1cd845e27 100644 --- a/tests/components/command_line/test_binary_sensor.py +++ b/tests/components/command_line/test_binary_sensor.py @@ -56,6 +56,24 @@ async def test_setup_integration_yaml( assert entity_state.name == "Test" +async def test_setup_platform_yaml(hass: HomeAssistant) -> None: + """Test setting up the platform with platform yaml.""" + await setup.async_setup_component( + hass, + "binary_sensor", + { + "binary_sensor": { + "platform": "command_line", + "command": "echo 1", + "payload_on": "1", + "payload_off": "0", + } + }, + ) + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 + + @pytest.mark.parametrize( "get_config", [ diff --git a/tests/components/command_line/test_cover.py b/tests/components/command_line/test_cover.py index 7ed48909d79..b81d915c6d5 100644 --- a/tests/components/command_line/test_cover.py +++ b/tests/components/command_line/test_cover.py @@ -36,6 +36,24 @@ from . 
import mock_asyncio_subprocess_run
 from tests.common import async_fire_time_changed
 
 
+async def test_setup_platform_yaml(hass: HomeAssistant) -> None:
+    """Test setting up the platform with platform yaml."""
+    await setup.async_setup_component(
+        hass,
+        "cover",
+        {
+            "cover": {
+                "platform": "command_line",
+                "command": "echo 1",
+                "payload_on": "1",
+                "payload_off": "0",
+            }
+        },
+    )
+    await hass.async_block_till_done()
+    assert len(hass.states.async_all()) == 0
+
+
 async def test_no_poll_when_cover_has_no_command_state(hass: HomeAssistant) -> None:
     """Test that the cover does not poll when there's no state command."""
diff --git a/tests/components/command_line/test_notify.py b/tests/components/command_line/test_notify.py
index c775d87fedb..6898b44f062 100644
--- a/tests/components/command_line/test_notify.py
+++ b/tests/components/command_line/test_notify.py
@@ -16,6 +16,24 @@ from homeassistant.components.notify import DOMAIN as NOTIFY_DOMAIN
 from homeassistant.core import HomeAssistant
 
 
+async def test_setup_platform_yaml(hass: HomeAssistant) -> None:
+    """Test setting up the platform with platform yaml."""
+    await setup.async_setup_component(
+        hass,
+        "notify",
+        {
+            "notify": {
+                "platform": "command_line",
+                "command": "echo 1",
+                "payload_on": "1",
+                "payload_off": "0",
+            }
+        },
+    )
+    await hass.async_block_till_done()
+    assert len(hass.states.async_all()) == 0
+
+
 @pytest.mark.parametrize(
     "get_config",
     [
diff --git a/tests/components/command_line/test_sensor.py b/tests/components/command_line/test_sensor.py
index eeccf2c358e..f7879b334cd 100644
--- a/tests/components/command_line/test_sensor.py
+++ b/tests/components/command_line/test_sensor.py
@@ -27,6 +27,24 @@ from . import mock_asyncio_subprocess_run
 from tests.common import async_fire_time_changed
 
 
+async def test_setup_platform_yaml(hass: HomeAssistant) -> None:
+    """Test setting up the platform with platform yaml."""
+    await setup.async_setup_component(
+        hass,
+        "sensor",
+        {
+            "sensor": {
+                "platform": "command_line",
+                "command": "echo 1",
+                "payload_on": "1",
+                "payload_off": "0",
+            }
+        },
+    )
+    await hass.async_block_till_done()
+    assert len(hass.states.async_all()) == 0
+
+
 @pytest.mark.parametrize(
     "get_config",
     [
diff --git a/tests/components/command_line/test_switch.py b/tests/components/command_line/test_switch.py
index c464ded34fb..549e729892c 100644
--- a/tests/components/command_line/test_switch.py
+++ b/tests/components/command_line/test_switch.py
@@ -37,6 +37,24 @@ from . 
import mock_asyncio_subprocess_run
 from tests.common import async_fire_time_changed
 
 
+async def test_setup_platform_yaml(hass: HomeAssistant) -> None:
+    """Test setting up the platform with platform yaml."""
+    await setup.async_setup_component(
+        hass,
+        "switch",
+        {
+            "switch": {
+                "platform": "command_line",
+                "command": "echo 1",
+                "payload_on": "1",
+                "payload_off": "0",
+            }
+        },
+    )
+    await hass.async_block_till_done()
+    assert len(hass.states.async_all()) == 0
+
+
 async def test_state_integration_yaml(hass: HomeAssistant) -> None:
     """Test with none state."""
     with tempfile.TemporaryDirectory() as tempdirname:
diff --git a/tests/components/config/test_device_registry.py b/tests/components/config/test_device_registry.py
index aab898f5fd6..c840ce2bed2 100644
--- a/tests/components/config/test_device_registry.py
+++ b/tests/components/config/test_device_registry.py
@@ -7,6 +7,7 @@ import pytest
 from pytest_unordered import unordered
 
 from homeassistant.components.config import device_registry
+from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import device_registry as dr
 from homeassistant.setup import async_setup_component
@@ -274,7 +275,9 @@ async def test_remove_config_entry_from_device(
 
     can_remove = False
 
-    async def async_remove_config_entry_device(hass, config_entry, device_entry):
+    async def async_remove_config_entry_device(
+        hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
+    ) -> bool:
         return can_remove
 
     mock_integration(
@@ -356,7 +359,9 @@ async def test_remove_config_entry_from_device_fails(
     assert await async_setup_component(hass, "config", {})
     ws_client = await hass_ws_client(hass)
 
-    async def async_remove_config_entry_device(hass, config_entry, device_entry):
+    async def async_remove_config_entry_device(
+        hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
+    ) -> bool:
         return True
 
     mock_integration(
@@ -473,7 +478,9 @@ async def test_remove_config_entry_from_device_if_integration_remove(
 
     can_remove = False
 
-    async def async_remove_config_entry_device(hass, config_entry, device_entry):
+    async def async_remove_config_entry_device(
+        hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry
+    ) -> bool:
         if can_remove:
             device_registry.async_update_device(
                 device_entry.id, remove_config_entry_id=config_entry.entry_id
diff --git a/tests/components/conftest.py b/tests/components/conftest.py
index 7d15bde88c0..39ff7071dc4 100644
--- a/tests/components/conftest.py
+++ b/tests/components/conftest.py
@@ -6,7 +6,7 @@ from collections.abc import Callable, Generator
 from importlib.util import find_spec
 from pathlib import Path
 from typing import TYPE_CHECKING, Any
-from unittest.mock import MagicMock, patch
+from unittest.mock import AsyncMock, MagicMock, patch
 
 import pytest
 
@@ -14,6 +14,8 @@ from homeassistant.const import STATE_OFF, STATE_ON
 from homeassistant.core import HomeAssistant
 
 if TYPE_CHECKING:
+    from homeassistant.components.hassio.addon_manager import AddonManager
+
     from .conversation import MockAgent
     from .device_tracker.common import MockScanner
     from .light.common import MockLight
@@ -180,3 +182,230 @@ def mock_legacy_device_tracker_setup() -> Callable[[HomeAssistant, MockScanner],
     from .device_tracker.common import mock_legacy_device_tracker_setup
 
     return mock_legacy_device_tracker_setup
+
+
+@pytest.fixture(name="addon_manager")
+def addon_manager_fixture(hass: HomeAssistant) -> AddonManager:
+    """Return an AddonManager 
instance.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_manager + + return mock_addon_manager(hass) + + +@pytest.fixture(name="discovery_info") +def discovery_info_fixture() -> Any: + """Return the discovery info from the supervisor.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_discovery_info + + return mock_discovery_info() + + +@pytest.fixture(name="discovery_info_side_effect") +def discovery_info_side_effect_fixture() -> Any | None: + """Return the discovery info from the supervisor.""" + return None + + +@pytest.fixture(name="get_addon_discovery_info") +def get_addon_discovery_info_fixture( + discovery_info: dict[str, Any], discovery_info_side_effect: Any | None +) -> Generator[AsyncMock]: + """Mock get add-on discovery info.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_get_addon_discovery_info + + yield from mock_get_addon_discovery_info(discovery_info, discovery_info_side_effect) + + +@pytest.fixture(name="addon_store_info_side_effect") +def addon_store_info_side_effect_fixture() -> Any | None: + """Return the add-on store info side effect.""" + return None + + +@pytest.fixture(name="addon_store_info") +def addon_store_info_fixture( + addon_store_info_side_effect: Any | None, +) -> Generator[AsyncMock]: + """Mock Supervisor add-on store info.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_store_info + + yield from mock_addon_store_info(addon_store_info_side_effect) + + +@pytest.fixture(name="addon_info_side_effect") +def addon_info_side_effect_fixture() -> Any | None: + """Return the add-on info side effect.""" + return None + + +@pytest.fixture(name="addon_info") +def addon_info_fixture(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: + """Mock Supervisor add-on info.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_info + + yield from mock_addon_info(addon_info_side_effect) + + +@pytest.fixture(name="addon_not_installed") +def addon_not_installed_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on not installed.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_not_installed + + return mock_addon_not_installed(addon_store_info, addon_info) + + +@pytest.fixture(name="addon_installed") +def addon_installed_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on already installed but not running.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_installed + + return mock_addon_installed(addon_store_info, addon_info) + + +@pytest.fixture(name="addon_running") +def addon_running_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on already running.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_running + + return mock_addon_running(addon_store_info, addon_info) + + +@pytest.fixture(name="install_addon_side_effect") +def install_addon_side_effect_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: + """Return the install add-on side effect.""" + + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_install_addon_side_effect + + return mock_install_addon_side_effect(addon_store_info, addon_info) + + +@pytest.fixture(name="install_addon") +def 
install_addon_fixture( + install_addon_side_effect: Any | None, +) -> Generator[AsyncMock]: + """Mock install add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_install_addon + + yield from mock_install_addon(install_addon_side_effect) + + +@pytest.fixture(name="start_addon_side_effect") +def start_addon_side_effect_fixture( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: + """Return the start add-on options side effect.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_start_addon_side_effect + + return mock_start_addon_side_effect(addon_store_info, addon_info) + + +@pytest.fixture(name="start_addon") +def start_addon_fixture(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: + """Mock start add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_start_addon + + yield from mock_start_addon(start_addon_side_effect) + + +@pytest.fixture(name="restart_addon_side_effect") +def restart_addon_side_effect_fixture() -> Any | None: + """Return the restart add-on options side effect.""" + return None + + +@pytest.fixture(name="restart_addon") +def restart_addon_fixture( + restart_addon_side_effect: Any | None, +) -> Generator[AsyncMock]: + """Mock restart add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_restart_addon + + yield from mock_restart_addon(restart_addon_side_effect) + + +@pytest.fixture(name="stop_addon") +def stop_addon_fixture() -> Generator[AsyncMock]: + """Mock stop add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_stop_addon + + yield from mock_stop_addon() + + +@pytest.fixture(name="addon_options") +def addon_options_fixture(addon_info: AsyncMock) -> dict[str, Any]: + """Mock add-on options.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_addon_options + + return mock_addon_options(addon_info) + + +@pytest.fixture(name="set_addon_options_side_effect") +def set_addon_options_side_effect_fixture( + addon_options: dict[str, Any], +) -> Any | None: + """Return the set add-on options side effect.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_set_addon_options_side_effect + + return mock_set_addon_options_side_effect(addon_options) + + +@pytest.fixture(name="set_addon_options") +def set_addon_options_fixture( + set_addon_options_side_effect: Any | None, +) -> Generator[AsyncMock]: + """Mock set add-on options.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_set_addon_options + + yield from mock_set_addon_options(set_addon_options_side_effect) + + +@pytest.fixture(name="uninstall_addon") +def uninstall_addon_fixture() -> Generator[AsyncMock]: + """Mock uninstall add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_uninstall_addon + + yield from mock_uninstall_addon() + + +@pytest.fixture(name="create_backup") +def create_backup_fixture() -> Generator[AsyncMock]: + """Mock create backup.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_create_backup + + yield from mock_create_backup() + + +@pytest.fixture(name="update_addon") +def update_addon_fixture() -> Generator[AsyncMock]: + """Mock update add-on.""" + # pylint: disable-next=import-outside-toplevel + from .hassio.common import mock_update_addon + + yield from mock_update_addon() diff --git 
a/tests/components/conversation/snapshots/test_default_agent.ambr b/tests/components/conversation/snapshots/test_default_agent.ambr index d015b19ddc1..051613f0300 100644 --- a/tests/components/conversation/snapshots/test_default_agent.ambr +++ b/tests/components/conversation/snapshots/test_default_agent.ambr @@ -344,126 +344,6 @@ }), }) # --- -# name: test_intent_entity_exposed.1 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called my cool light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_exposed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_exposed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called my cool light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_exposed.4 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_exposed.5 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- # name: test_intent_entity_fail_if_unexposed dict({ 'conversation_id': None, @@ -614,73 +494,3 @@ }), }) # --- -# name: test_intent_entity_renamed.2 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called kitchen light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_renamed.3 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'failed': list([ - ]), - 'success': list([ - dict({ - 'id': 'light.kitchen', - 'name': 'kitchen light', - 'type': , - }), - ]), - 'targets': list([ - ]), - }), - 'language': 'en', - 'response_type': 'action_done', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Turned on the light', - }), - }), - }), - }) -# --- -# name: test_intent_entity_renamed.4 - dict({ - 'conversation_id': None, - 'response': dict({ - 'card': dict({ - }), - 'data': dict({ - 'code': 
'no_valid_targets', - }), - 'language': 'en', - 'response_type': 'error', - 'speech': dict({ - 'plain': dict({ - 'extra_data': None, - 'speech': 'Sorry, I am not aware of any device called renamed light', - }), - }), - }), - }) -# --- diff --git a/tests/components/conversation/test_agent_manager.py b/tests/components/conversation/test_agent_manager.py new file mode 100644 index 00000000000..47b58a522a8 --- /dev/null +++ b/tests/components/conversation/test_agent_manager.py @@ -0,0 +1,34 @@ +"""Test agent manager.""" + +from unittest.mock import patch + +from homeassistant.components.conversation import ConversationResult, async_converse +from homeassistant.core import Context, HomeAssistant +from homeassistant.helpers.intent import IntentResponse + + +async def test_async_converse(hass: HomeAssistant, init_components) -> None: + """Test the async_converse method.""" + context = Context() + with patch( + "homeassistant.components.conversation.default_agent.DefaultAgent.async_process", + return_value=ConversationResult(response=IntentResponse(language="test lang")), + ) as mock_process: + await async_converse( + hass, + text="test command", + conversation_id="test id", + context=context, + language="test lang", + agent_id="conversation.home_assistant", + device_id="test device id", + ) + + assert mock_process.called + conversation_input = mock_process.call_args[0][0] + assert conversation_input.text == "test command" + assert conversation_input.conversation_id == "test id" + assert conversation_input.context is context + assert conversation_input.language == "test lang" + assert conversation_input.agent_id == "conversation.home_assistant" + assert conversation_input.device_id == "test device id" diff --git a/tests/components/conversation/test_default_agent.py b/tests/components/conversation/test_default_agent.py index 315b73bacfd..935ef205d4f 100644 --- a/tests/components/conversation/test_default_agent.py +++ b/tests/components/conversation/test_default_agent.py @@ -308,6 +308,72 @@ async def test_unexposed_entities_skipped( assert result.response.matched_states[0].entity_id == exposed_light.entity_id +@pytest.mark.usefixtures("init_components") +async def test_duplicated_names_resolved_with_device_area( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test entities deduplication with device ID context.""" + area_kitchen = area_registry.async_get_or_create("kitchen_id") + area_bedroom = area_registry.async_get_or_create("bedroom_id") + + kitchen_light = entity_registry.async_get_or_create("light", "demo", "1234") + bedroom_light = entity_registry.async_get_or_create("light", "demo", "5678") + + # Same name and alias + for light in (kitchen_light, bedroom_light): + light = entity_registry.async_update_entity( + light.entity_id, + name="top light", + aliases={"overhead light"}, + ) + hass.states.async_set( + light.entity_id, + "off", + attributes={ATTR_FRIENDLY_NAME: light.name}, + ) + # Different areas + kitchen_light = entity_registry.async_update_entity( + kitchen_light.entity_id, + area_id=area_kitchen.id, + ) + bedroom_light = entity_registry.async_update_entity( + bedroom_light.entity_id, + area_id=area_bedroom.id, + ) + + # Pipeline device in bedroom area + entry = MockConfigEntry() + entry.add_to_hass(hass) + assist_device = device_registry.async_get_or_create( + config_entry_id=entry.entry_id, + connections=set(), + identifiers={("demo", "id-1234")}, + ) + assist_device = 
device_registry.async_update_device( + assist_device.id, + area_id=area_bedroom.id, + ) + + # Check name and alias + for name in ("top light", "overhead light"): + # Only one light should be turned on + calls = async_mock_service(hass, "light", "turn_on") + result = await conversation.async_converse( + hass, f"turn on {name}", None, Context(), device_id=assist_device.id + ) + + assert len(calls) == 1 + assert calls[0].data["entity_id"][0] == bedroom_light.entity_id + + assert result.response.response_type == intent.IntentResponseType.ACTION_DONE + assert result.response.intent is not None + assert result.response.intent.slots.get("name", {}).get("value") == name + assert result.response.intent.slots.get("name", {}).get("text") == name + + @pytest.mark.usefixtures("init_components") async def test_trigger_sentences(hass: HomeAssistant) -> None: """Test registering/unregistering/matching a few trigger sentences.""" diff --git a/tests/components/counter/common.py b/tests/components/counter/common.py index b5156c1a432..e5d9316cd22 100644 --- a/tests/components/counter/common.py +++ b/tests/components/counter/common.py @@ -11,13 +11,13 @@ from homeassistant.components.counter import ( SERVICE_RESET, ) from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.loader import bind_hass @callback @bind_hass -def async_increment(hass, entity_id): +def async_increment(hass: HomeAssistant, entity_id: str) -> None: """Increment a counter.""" hass.async_create_task( hass.services.async_call(DOMAIN, SERVICE_INCREMENT, {ATTR_ENTITY_ID: entity_id}) @@ -26,7 +26,7 @@ def async_increment(hass, entity_id): @callback @bind_hass -def async_decrement(hass, entity_id): +def async_decrement(hass: HomeAssistant, entity_id: str) -> None: """Decrement a counter.""" hass.async_create_task( hass.services.async_call(DOMAIN, SERVICE_DECREMENT, {ATTR_ENTITY_ID: entity_id}) @@ -35,7 +35,7 @@ def async_decrement(hass, entity_id): @callback @bind_hass -def async_reset(hass, entity_id): +def async_reset(hass: HomeAssistant, entity_id: str) -> None: """Reset a counter.""" hass.async_create_task( hass.services.async_call(DOMAIN, SERVICE_RESET, {ATTR_ENTITY_ID: entity_id}) diff --git a/tests/components/cover/test_init.py b/tests/components/cover/test_init.py index 37740260c2f..d1d84ffad6c 100644 --- a/tests/components/cover/test_init.py +++ b/tests/components/cover/test_init.py @@ -14,7 +14,8 @@ from homeassistant.const import ( STATE_OPEN, STATE_OPENING, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse +from homeassistant.helpers.entity import Entity from homeassistant.setup import async_setup_component from .common import MockCover @@ -119,7 +120,7 @@ async def test_services( assert is_closing(hass, ent5) -def call_service(hass, service, ent): +def call_service(hass: HomeAssistant, service: str, ent: Entity) -> ServiceResponse: """Call any service on entity.""" return hass.services.async_call( cover.DOMAIN, service, {ATTR_ENTITY_ID: ent.entity_id}, blocking=True @@ -136,22 +137,22 @@ def set_state(ent, state) -> None: ent._values["state"] = state -def is_open(hass, ent): +def is_open(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_OPEN) -def is_opening(hass, ent): +def is_opening(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed 
based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_OPENING) -def is_closed(hass, ent): +def is_closed(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_CLOSED) -def is_closing(hass, ent): +def is_closing(hass: HomeAssistant, ent: Entity) -> bool: """Return if the cover is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_CLOSING) diff --git a/tests/components/deconz/snapshots/test_light.ambr b/tests/components/deconz/snapshots/test_light.ambr index 46b6611dcbe..b5a9f7b5543 100644 --- a/tests/components/deconz/snapshots/test_light.ambr +++ b/tests/components/deconz/snapshots/test_light.ambr @@ -1,308 +1,4 @@ # serializer version: 1 -# name: test_groups[input0-expected0-light_payload0][light.dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimmable light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.group-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.group', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01234E56789A-/groups/0', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.group-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_on': False, - 'brightness': 255, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Group', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': True, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 
'min_mireds': 153, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.group', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.rgb_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.rgb_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'RGB light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.rgb_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 50, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'RGB light', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.rgb_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.tunable_white_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.tunable_white_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tunable white light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input0-expected0-light_payload0][light.tunable_white_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'friendly_name': 'Tunable white light', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.tunable_white_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: 
test_groups[input0-light_payload0][light.dimmable_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -607,310 +303,6 @@ 'state': 'on', }) # --- -# name: test_groups[input1-expected1-light_payload0][light.dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimmable light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.group-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.group', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01234E56789A-/groups/0', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.group-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_on': False, - 'brightness': 50, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Group', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': True, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.group', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.rgb_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.rgb_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'RGB light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.rgb_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 50, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'RGB light', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.rgb_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.tunable_white_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.tunable_white_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tunable white light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input1-expected1-light_payload0][light.tunable_white_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'friendly_name': 'Tunable white light', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.tunable_white_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_groups[input1-light_payload0][light.dimmable_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1215,310 +607,6 @@ 'state': 'on', }) # --- -# name: test_groups[input2-expected2-light_payload0][light.dimmable_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.dimmable_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - 
}), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Dimmable light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:02-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.dimmable_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 255, - 'color_mode': , - 'friendly_name': 'Dimmable light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.dimmable_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.group-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.group', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '01234E56789A-/groups/0', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.group-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'all_on': False, - 'brightness': 50, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Group', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': True, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.group', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.rgb_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.rgb_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'RGB light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.rgb_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'brightness': 50, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'RGB light', - 'hs_color': tuple( - 52.0, - 100.0, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 255, - 221, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.5, - 0.5, - ), - }), - 'context': , - 'entity_id': 'light.rgb_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.tunable_white_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.tunable_white_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tunable white light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:01-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_groups[input2-expected2-light_payload0][light.tunable_white_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': None, - 'color_mode': , - 'color_temp': 2500, - 'color_temp_kelvin': 400, - 'friendly_name': 'Tunable white light', - 'hs_color': tuple( - 15.981, - 100.0, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6451, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 155, - 'rgb_color': tuple( - 255, - 67, - 0, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.674, - 0.322, - ), - }), - 'context': , - 'entity_id': 'light.tunable_white_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_groups[input2-light_payload0][light.dimmable_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1823,97 +911,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload0-expected0][light.hue_go-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_go', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue Go', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-00', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload0-expected0][light.hue_go-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'color_temp': 375, - 
'color_temp_kelvin': 2666, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Hue Go', - 'hs_color': tuple( - 28.47, - 66.821, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 165, - 84, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.53, - 0.388, - ), - }), - 'context': , - 'entity_id': 'light.hue_go', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload0][light.hue_go-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2005,97 +1002,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload1-expected1][light.hue_ensis-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 7142, - 'max_mireds': 650, - 'min_color_temp_kelvin': 1538, - 'min_mireds': 140, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_ensis', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue Ensis', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload1-expected1][light.hue_ensis-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'effect': None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Hue Ensis', - 'hs_color': tuple( - 29.691, - 38.039, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 7142, - 'max_mireds': 650, - 'min_color_temp_kelvin': 1538, - 'min_mireds': 140, - 'rgb_color': tuple( - 255, - 206, - 158, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.427, - 0.373, - ), - }), - 'context': , - 'entity_id': 'light.hue_ensis', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload1][light.hue_ensis-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2187,113 +1093,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload2-expected2][light.lidl_xmas_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'carnival', - 'collide', - 'fading', - 'fireworks', - 'flag', - 'glow', - 'rainbow', - 'snake', - 'snow', - 'sparkles', - 'steady', - 'strobe', - 'twinkle', - 'updown', - 'vintage', - 'waves', - ]), - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.lidl_xmas_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'LIDL xmas 
light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '58:8e:81:ff:fe:db:7b:be-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload2-expected2][light.lidl_xmas_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 25, - 'color_mode': , - 'effect': None, - 'effect_list': list([ - 'carnival', - 'collide', - 'fading', - 'fireworks', - 'flag', - 'glow', - 'rainbow', - 'snake', - 'snow', - 'sparkles', - 'steady', - 'strobe', - 'twinkle', - 'updown', - 'vintage', - 'waves', - ]), - 'friendly_name': 'LIDL xmas light', - 'hs_color': tuple( - 294.938, - 55.294, - ), - 'is_deconz_group': False, - 'rgb_color': tuple( - 243, - 113, - 255, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.357, - 0.188, - ), - }), - 'context': , - 'entity_id': 'light.lidl_xmas_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload2][light.lidl_xmas_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2401,86 +1200,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload3-expected3][light.hue_white_ambiance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'max_color_temp_kelvin': 6535, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_white_ambiance', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue White Ambiance', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-02', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload3-expected3][light.hue_white_ambiance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'color_temp': 396, - 'color_temp_kelvin': 2525, - 'friendly_name': 'Hue White Ambiance', - 'hs_color': tuple( - 28.809, - 71.624, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 454, - 'min_color_temp_kelvin': 2202, - 'min_mireds': 153, - 'rgb_color': tuple( - 255, - 160, - 72, - ), - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.544, - 0.389, - ), - }), - 'context': , - 'entity_id': 'light.hue_white_ambiance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload3][light.hue_white_ambiance-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2561,63 +1280,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload4-expected4][light.hue_filament-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.hue_filament', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - 
}), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Hue Filament', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:01:23:45:67-03', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload4-expected4][light.hue_filament-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 254, - 'color_mode': , - 'friendly_name': 'Hue Filament', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.hue_filament', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload4][light.hue_filament-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2675,62 +1337,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload5-expected5][light.simple_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'supported_color_modes': list([ - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.simple_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Simple Light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:01:23:45:67-01', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload5-expected5][light.simple_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'color_mode': , - 'friendly_name': 'Simple Light', - 'is_deconz_group': False, - 'supported_color_modes': list([ - , - ]), - 'supported_features': , - }), - 'context': , - 'entity_id': 'light.simple_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload5][light.simple_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -2787,97 +1393,6 @@ 'state': 'on', }) # --- -# name: test_lights[light_payload6-expected6][light.gradient_light-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'effect_list': list([ - 'colorloop', - ]), - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'supported_color_modes': list([ - , - , - , - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'light', - 'entity_category': None, - 'entity_id': 'light.gradient_light', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Gradient light', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': None, - 'unique_id': '00:17:88:01:0b:0c:0d:0e-0f', - 'unit_of_measurement': None, - }) -# --- -# name: test_lights[light_payload6-expected6][light.gradient_light-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'brightness': 184, - 'color_mode': , - 'color_temp': None, - 'color_temp_kelvin': None, - 'effect': 
None, - 'effect_list': list([ - 'colorloop', - ]), - 'friendly_name': 'Gradient light', - 'hs_color': tuple( - 98.095, - 74.118, - ), - 'is_deconz_group': False, - 'max_color_temp_kelvin': 6535, - 'max_mireds': 500, - 'min_color_temp_kelvin': 2000, - 'min_mireds': 153, - 'rgb_color': tuple( - 135, - 255, - 66, - ), - 'supported_color_modes': list([ - , - , - , - ]), - 'supported_features': , - 'xy_color': tuple( - 0.2727, - 0.6226, - ), - }), - 'context': , - 'entity_id': 'light.gradient_light', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_lights[light_payload6][light.gradient_light-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/deconz/snapshots/test_number.ambr b/tests/components/deconz/snapshots/test_number.ambr index 5311addc7a1..26e044e1d31 100644 --- a/tests/components/deconz/snapshots/test_number.ambr +++ b/tests/components/deconz/snapshots/test_number.ambr @@ -1,54 +1,4 @@ # serializer version: 1 -# name: test_number_entities[sensor_payload0-expected0][binary_sensor.presence_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.presence_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-presence', - 'unit_of_measurement': None, - }) -# --- -# name: test_number_entities[sensor_payload0-expected0][binary_sensor.presence_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': False, - 'device_class': 'motion', - 'friendly_name': 'Presence sensor', - 'on': True, - 'temperature': 0.1, - }), - 'context': , - 'entity_id': 'binary_sensor.presence_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_number_entities[sensor_payload0-expected0][number.presence_sensor_delay-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -104,56 +54,6 @@ 'state': '0', }) # --- -# name: test_number_entities[sensor_payload1-expected1][binary_sensor.presence_sensor-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.presence_sensor', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Presence sensor', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00:00:00-00-presence', - 'unit_of_measurement': None, - }) -# --- -# name: test_number_entities[sensor_payload1-expected1][binary_sensor.presence_sensor-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'dark': False, - 'device_class': 'motion', - 'friendly_name': 'Presence sensor', - 'on': True, - 'temperature': 0.1, - }), - 'context': , - 
'entity_id': 'binary_sensor.presence_sensor', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_number_entities[sensor_payload1-expected1][number.presence_sensor_duration-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/deconz/snapshots/test_select.ambr b/tests/components/deconz/snapshots/test_select.ambr index 12966709947..997eab0901f 100644 --- a/tests/components/deconz/snapshots/test_select.ambr +++ b/tests/components/deconz/snapshots/test_select.ambr @@ -506,3 +506,68 @@ 'state': 'medium', }) # --- +# name: test_select[sensor_payload3-expected3][select.ikea_starkvind_fan_mode-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'off', + 'auto', + 'speed_1', + 'speed_2', + 'speed_3', + 'speed_4', + 'speed_5', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'select', + 'entity_category': , + 'entity_id': 'select.ikea_starkvind_fan_mode', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IKEA Starkvind Fan Mode', + 'platform': 'deconz', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '0c:43:14:ff:fe:6c:20:12-01-fc7d-fan_mode', + 'unit_of_measurement': None, + }) +# --- +# name: test_select[sensor_payload3-expected3][select.ikea_starkvind_fan_mode-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'IKEA Starkvind Fan Mode', + 'options': list([ + 'off', + 'auto', + 'speed_1', + 'speed_2', + 'speed_3', + 'speed_4', + 'speed_5', + ]), + }), + 'context': , + 'entity_id': 'select.ikea_starkvind_fan_mode', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'speed_1', + }) +# --- diff --git a/tests/components/deconz/snapshots/test_sensor.ambr b/tests/components/deconz/snapshots/test_sensor.ambr index 7f12292abbd..dd097ea1c9a 100644 --- a/tests/components/deconz/snapshots/test_sensor.ambr +++ b/tests/components/deconz/snapshots/test_sensor.ambr @@ -548,53 +548,6 @@ 'state': '100', }) # --- -# name: test_sensors[config_entry_options0-sensor_payload12-expected12][binary_sensor.soil_sensor_low_battery-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.soil_sensor_low_battery', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Soil Sensor Low Battery', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'a4:c1:38:fe:86:8f:07:a3-01-0408-low_battery', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload12-expected12][binary_sensor.soil_sensor_low_battery-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery', - 'friendly_name': 'Soil Sensor Low Battery', - }), - 'context': , - 'entity_id': 'binary_sensor.soil_sensor_low_battery', - 'last_changed': , - 'last_reported': , - 'last_updated': , 
- 'state': 'off', - }) -# --- # name: test_sensors[config_entry_options0-sensor_payload12-expected12][sensor.soil_sensor-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1224,55 +1177,6 @@ 'state': '40', }) # --- -# name: test_sensors[config_entry_options0-sensor_payload19-expected19][binary_sensor.alarm_10-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.alarm_10', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Alarm 10', - 'platform': 'deconz', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:15:8d:00:02:b5:d1:80-01-0500-alarm', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[config_entry_options0-sensor_payload19-expected19][binary_sensor.alarm_10-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'safety', - 'friendly_name': 'Alarm 10', - 'on': True, - 'temperature': 26.0, - }), - 'context': , - 'entity_id': 'binary_sensor.alarm_10', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_sensors[config_entry_options0-sensor_payload19-expected19][sensor.alarm_10_battery-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/deconz/test_config_flow.py b/tests/components/deconz/test_config_flow.py index 49711962407..8555a6e333b 100644 --- a/tests/components/deconz/test_config_flow.py +++ b/tests/components/deconz/test_config_flow.py @@ -22,12 +22,7 @@ from homeassistant.components.deconz.const import ( ) from homeassistant.components.hassio import HassioServiceInfo from homeassistant.components.ssdp import ATTR_UPNP_MANUFACTURER_URL, ATTR_UPNP_SERIAL -from homeassistant.config_entries import ( - SOURCE_HASSIO, - SOURCE_REAUTH, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PORT, CONTENT_TYPE_JSON from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -407,12 +402,7 @@ async def test_reauth_flow_update_configuration( config_entry_setup: MockConfigEntry, ) -> None: """Verify reauth flow can update gateway API key.""" - result = await hass.config_entries.flow.async_init( - DECONZ_DOMAIN, - data=config_entry_setup.data, - context={"source": SOURCE_REAUTH}, - ) - + result = await config_entry_setup.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "link" diff --git a/tests/components/deconz/test_select.py b/tests/components/deconz/test_select.py index 900283d88bb..c677853841c 100644 --- a/tests/components/deconz/test_select.py +++ b/tests/components/deconz/test_select.py @@ -4,6 +4,7 @@ from collections.abc import Callable from typing import Any from unittest.mock import patch +from pydeconz.models.sensor.air_purifier import AirPurifierFanMode from pydeconz.models.sensor.presence import ( PresenceConfigDeviceMode, PresenceConfigTriggerDistance, @@ -119,6 +120,42 @@ TEST_DATA = [ "request_data": {"triggerdistance": "far"}, }, ), + ( # Air Purifier Fan Mode + { + "config": { + "filterlifetime": 259200, + 
"ledindication": True, + "locked": False, + "mode": "speed_1", + "on": True, + "reachable": True, + }, + "ep": 1, + "etag": "de26d19d9e91b2db3ded6ee7ab6b6a4b", + "lastannounced": None, + "lastseen": "2024-08-07T18:27Z", + "manufacturername": "IKEA of Sweden", + "modelid": "STARKVIND Air purifier", + "name": "IKEA Starkvind", + "productid": "E2007", + "state": { + "deviceruntime": 73405, + "filterruntime": 73405, + "lastupdated": "2024-08-07T18:27:52.543", + "replacefilter": False, + "speed": 20, + }, + "swversion": "1.1.001", + "type": "ZHAAirPurifier", + "uniqueid": "0c:43:14:ff:fe:6c:20:12-01-fc7d", + }, + { + "entity_id": "select.ikea_starkvind_fan_mode", + "option": AirPurifierFanMode.AUTO.value, + "request": "/sensors/0/config", + "request_data": {"mode": "auto"}, + }, + ), ] diff --git a/tests/components/deluge/test_config_flow.py b/tests/components/deluge/test_config_flow.py index 37229d4a72e..c336fc81cc6 100644 --- a/tests/components/deluge/test_config_flow.py +++ b/tests/components/deluge/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch import pytest from homeassistant.components.deluge.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -113,16 +113,7 @@ async def test_flow_reauth(hass: HomeAssistant, api) -> None: entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/demo/test_media_player.py b/tests/components/demo/test_media_player.py index a6669fa705c..7487a4c13e3 100644 --- a/tests/components/demo/test_media_player.py +++ b/tests/components/demo/test_media_player.py @@ -497,7 +497,7 @@ async def test_media_image_proxy( class MockResponse: """Test response.""" - def __init__(self): + def __init__(self) -> None: """Test response init.""" self.status = 200 self.headers = {"Content-Type": "sometype"} diff --git a/tests/components/denonavr/test_media_player.py b/tests/components/denonavr/test_media_player.py index c294c449518..6550b31b1f9 100644 --- a/tests/components/denonavr/test_media_player.py +++ b/tests/components/denonavr/test_media_player.py @@ -60,7 +60,7 @@ def client_fixture(): yield mock_client_class.return_value -async def setup_denonavr(hass): +async def setup_denonavr(hass: HomeAssistant) -> None: """Initialize media_player for tests.""" entry_data = { CONF_HOST: TEST_HOST, diff --git a/tests/components/derivative/test_sensor.py b/tests/components/derivative/test_sensor.py index df050c58f10..3646340cac3 100644 --- a/tests/components/derivative/test_sensor.py +++ b/tests/components/derivative/test_sensor.py @@ -3,12 +3,13 @@ from datetime import timedelta from math import sin import random +from typing import Any from freezegun import freeze_time from homeassistant.components.derivative.const import DOMAIN from homeassistant.const import UnitOfPower, UnitOfTime -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.setup import 
async_setup_component import homeassistant.util.dt as dt_util @@ -49,7 +50,9 @@ async def test_state(hass: HomeAssistant) -> None: assert state.attributes.get("unit_of_measurement") == "kW" -async def _setup_sensor(hass, config): +async def _setup_sensor( + hass: HomeAssistant, config: dict[str, Any] +) -> tuple[dict[str, Any], str]: default_config = { "platform": "derivative", "name": "power", @@ -67,7 +70,13 @@ async def _setup_sensor(hass, config): return config, entity_id -async def setup_tests(hass, config, times, values, expected_state): +async def setup_tests( + hass: HomeAssistant, + config: dict[str, Any], + times: list[int], + values: list[float], + expected_state: float, +) -> State: """Test derivative sensor state.""" config, entity_id = await _setup_sensor(hass, config) diff --git a/tests/components/device_tracker/common.py b/tests/components/device_tracker/common.py index d30db984a66..b6341443d36 100644 --- a/tests/components/device_tracker/common.py +++ b/tests/components/device_tracker/common.py @@ -61,7 +61,7 @@ def async_see( class MockScannerEntity(ScannerEntity): """Test implementation of a ScannerEntity.""" - def __init__(self): + def __init__(self) -> None: """Init.""" self.connected = False self._hostname = "test.hostname.org" @@ -110,7 +110,7 @@ class MockScannerEntity(ScannerEntity): class MockScanner(DeviceScanner): """Mock device scanner.""" - def __init__(self): + def __init__(self) -> None: """Initialize the MockScanner.""" self.devices_home = [] diff --git a/tests/components/devolo_home_control/test_config_flow.py b/tests/components/devolo_home_control/test_config_flow.py index 48f9bf31f4f..7c9bfdeff63 100644 --- a/tests/components/devolo_home_control/test_config_flow.py +++ b/tests/components/devolo_home_control/test_config_flow.py @@ -164,21 +164,17 @@ async def test_zeroconf_wrong_device(hass: HomeAssistant) -> None: async def test_form_reauth(hass: HomeAssistant) -> None: """Test that the reauth confirmation form is served.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) - mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, + mock_config = MockConfigEntry( + domain=DOMAIN, + unique_id="123456", data={ "username": "test-username", "password": "test-password", "mydevolo_url": "https://test_mydevolo_url.test", }, ) - + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -205,20 +201,17 @@ async def test_form_reauth(hass: HomeAssistant) -> None: @pytest.mark.parametrize("credentials_valid", [False]) async def test_form_invalid_credentials_reauth(hass: HomeAssistant) -> None: """Test if we get the error message on invalid credentials.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) - mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, + mock_config = MockConfigEntry( + domain=DOMAIN, + unique_id="123456", data={ "username": "test-username", "password": "test-password", "mydevolo_url": "https://test_mydevolo_url.test", }, ) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -230,20 +223,17 @@ async 
def test_form_invalid_credentials_reauth(hass: HomeAssistant) -> None: async def test_form_uuid_change_reauth(hass: HomeAssistant) -> None: """Test that the reauth confirmation form is served.""" - mock_config = MockConfigEntry(domain=DOMAIN, unique_id="123456", data={}) - mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, + mock_config = MockConfigEntry( + domain=DOMAIN, + unique_id="123456", data={ "username": "test-username", "password": "test-password", "mydevolo_url": "https://test_mydevolo_url.test", }, ) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/devolo_home_network/test_config_flow.py b/tests/components/devolo_home_network/test_config_flow.py index 5aa2bfa274e..5234d0f073e 100644 --- a/tests/components/devolo_home_network/test_config_flow.py +++ b/tests/components/devolo_home_network/test_config_flow.py @@ -179,18 +179,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: entry = configure_integration(hass) await hass.config_entries.async_setup(entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": { - CONF_NAME: DISCOVERY_INFO.hostname.split(".")[0], - }, - }, - data=entry.data, - ) - + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/discord/__init__.py b/tests/components/discord/__init__.py index bf7c188b7b5..1d81388d1e3 100644 --- a/tests/components/discord/__init__.py +++ b/tests/components/discord/__init__.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock, Mock, patch import nextcord from homeassistant.components.discord.const import DOMAIN -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_API_TOKEN, CONF_NAME from homeassistant.core import HomeAssistant @@ -22,7 +21,7 @@ CONF_DATA = { } -def create_entry(hass: HomeAssistant) -> ConfigEntry: +def create_entry(hass: HomeAssistant) -> MockConfigEntry: """Add config entry in Home Assistant.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/discord/test_config_flow.py b/tests/components/discord/test_config_flow.py index 9b37179e86d..e9a1344c555 100644 --- a/tests/components/discord/test_config_flow.py +++ b/tests/components/discord/test_config_flow.py @@ -4,7 +4,7 @@ import nextcord from homeassistant import config_entries from homeassistant.components.discord.const import DOMAIN -from homeassistant.const import CONF_API_TOKEN, CONF_SOURCE +from homeassistant.const import CONF_API_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -123,16 +123,7 @@ async def test_flow_user_unknown_error(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test a reauth flow.""" entry = create_entry(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM 
assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/dormakaba_dkey/test_config_flow.py b/tests/components/dormakaba_dkey/test_config_flow.py index 499e5844949..8d8140d609a 100644 --- a/tests/components/dormakaba_dkey/test_config_flow.py +++ b/tests/components/dormakaba_dkey/test_config_flow.py @@ -310,11 +310,7 @@ async def test_reauth(hass: HomeAssistant) -> None: data={"address": DKEY_DISCOVERY_INFO.address}, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/dremel_3d_printer/conftest.py b/tests/components/dremel_3d_printer/conftest.py index 6490b844dc0..cc70537db3d 100644 --- a/tests/components/dremel_3d_printer/conftest.py +++ b/tests/components/dremel_3d_printer/conftest.py @@ -34,7 +34,7 @@ def connection() -> None: """Mock Dremel 3D Printer connection.""" with requests_mock.Mocker() as mock: mock.post( - f"http://{HOST}:80/command", + f"http://{HOST}/command", response_list=[ {"text": load_fixture("dremel_3d_printer/command_1.json")}, {"text": load_fixture("dremel_3d_printer/command_2.json")}, diff --git a/tests/components/drop_connect/common.py b/tests/components/drop_connect/common.py index bdba79bbd95..9eb76f57dad 100644 --- a/tests/components/drop_connect/common.py +++ b/tests/components/drop_connect/common.py @@ -34,6 +34,10 @@ TEST_DATA_SALT_TOPIC = "drop_connect/DROP-1_C0FFEE/8" TEST_DATA_SALT = '{"salt":1}' TEST_DATA_SALT_RESET = '{"salt":0}' +TEST_DATA_ALERT_TOPIC = "drop_connect/DROP-1_C0FFEE/81" +TEST_DATA_ALERT = '{"battery":100,"sens":1,"pwrOff":0,"temp":68.2}' +TEST_DATA_ALERT_RESET = '{"battery":0,"sens":0,"pwrOff":1,"temp":0}' + TEST_DATA_LEAK_TOPIC = "drop_connect/DROP-1_C0FFEE/20" TEST_DATA_LEAK = '{"battery":100,"leak":1,"temp":68.2}' TEST_DATA_LEAK_RESET = '{"battery":0,"leak":0,"temp":0}' @@ -109,6 +113,25 @@ def config_entry_salt() -> ConfigEntry: ) +def config_entry_alert() -> ConfigEntry: + """Config entry version 1 fixture.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id="DROP-1_C0FFEE_81", + data={ + CONF_COMMAND_TOPIC: "drop_connect/DROP-1_C0FFEE/81/cmd", + CONF_DATA_TOPIC: "drop_connect/DROP-1_C0FFEE/81/#", + CONF_DEVICE_DESC: "Alert", + CONF_DEVICE_ID: 81, + CONF_DEVICE_NAME: "Alert", + CONF_DEVICE_TYPE: "alrt", + CONF_HUB_ID: "DROP-1_C0FFEE", + CONF_DEVICE_OWNER_ID: "DROP-1_C0FFEE_255", + }, + version=1, + ) + + def config_entry_leak() -> ConfigEntry: """Config entry version 1 fixture.""" return MockConfigEntry( diff --git a/tests/components/drop_connect/snapshots/test_binary_sensor.ambr b/tests/components/drop_connect/snapshots/test_binary_sensor.ambr index c42cdb8cde1..9b0cc201573 100644 --- a/tests/components/drop_connect/snapshots/test_binary_sensor.ambr +++ b/tests/components/drop_connect/snapshots/test_binary_sensor.ambr @@ -1,4 +1,98 @@ # serializer version: 1 +# name: test_sensors[alert][binary_sensor.alert_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.alert_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': 
None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power', + 'platform': 'drop_connect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'DROP-1_C0FFEE_81_power', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[alert][binary_sensor.alert_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Alert Power', + }), + 'context': , + 'entity_id': 'binary_sensor.alert_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_sensors[alert][binary_sensor.alert_sensor-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.alert_sensor', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Sensor', + 'platform': 'drop_connect', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'alert_sensor', + 'unique_id': 'DROP-1_C0FFEE_81_alert_sensor', + 'unit_of_measurement': None, + }) +# --- +# name: test_sensors[alert][binary_sensor.alert_sensor-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'problem', + 'friendly_name': 'Alert Sensor', + }), + 'context': , + 'entity_id': 'binary_sensor.alert_sensor', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- # name: test_sensors[hub][binary_sensor.hub_drop_1_c0ffee_leak_detected-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/drop_connect/snapshots/test_sensor.ambr b/tests/components/drop_connect/snapshots/test_sensor.ambr index 54e3259e455..a5c91dbe3e4 100644 --- a/tests/components/drop_connect/snapshots/test_sensor.ambr +++ b/tests/components/drop_connect/snapshots/test_sensor.ambr @@ -1,4 +1,68 @@ # serializer version: 1 +# name: test_sensors[alert][sensor.alert_battery-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Alert Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.alert_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '100', + }) +# --- +# name: test_sensors[alert][sensor.alert_battery-reset] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Alert Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.alert_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_sensors[alert][sensor.alert_temperature-data] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Alert Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.alert_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.1111111111111', + }) +# --- +# name: test_sensors[alert][sensor.alert_temperature-reset] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Alert Temperature', + 'state_class': 
, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.alert_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '-17.7777777777778', + }) +# --- # name: test_sensors[filter][sensor.filter_battery-data] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/drop_connect/test_binary_sensor.py b/tests/components/drop_connect/test_binary_sensor.py index 895921291ef..ab89e05d809 100644 --- a/tests/components/drop_connect/test_binary_sensor.py +++ b/tests/components/drop_connect/test_binary_sensor.py @@ -10,6 +10,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from .common import ( + TEST_DATA_ALERT, + TEST_DATA_ALERT_RESET, + TEST_DATA_ALERT_TOPIC, TEST_DATA_HUB, TEST_DATA_HUB_RESET, TEST_DATA_HUB_TOPIC, @@ -28,6 +31,7 @@ from .common import ( TEST_DATA_SOFTENER, TEST_DATA_SOFTENER_RESET, TEST_DATA_SOFTENER_TOPIC, + config_entry_alert, config_entry_hub, config_entry_leak, config_entry_protection_valve, @@ -44,6 +48,12 @@ from tests.typing import MqttMockHAClient ("config_entry", "topic", "reset", "data"), [ (config_entry_hub(), TEST_DATA_HUB_TOPIC, TEST_DATA_HUB_RESET, TEST_DATA_HUB), + ( + config_entry_alert(), + TEST_DATA_ALERT_TOPIC, + TEST_DATA_ALERT_RESET, + TEST_DATA_ALERT, + ), ( config_entry_leak(), TEST_DATA_LEAK_TOPIC, @@ -77,6 +87,7 @@ from tests.typing import MqttMockHAClient ], ids=[ "hub", + "alert", "leak", "softener", "protection_valve", diff --git a/tests/components/drop_connect/test_sensor.py b/tests/components/drop_connect/test_sensor.py index cb56522a09d..c33f0aefe37 100644 --- a/tests/components/drop_connect/test_sensor.py +++ b/tests/components/drop_connect/test_sensor.py @@ -11,6 +11,9 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from .common import ( + TEST_DATA_ALERT, + TEST_DATA_ALERT_RESET, + TEST_DATA_ALERT_TOPIC, TEST_DATA_FILTER, TEST_DATA_FILTER_RESET, TEST_DATA_FILTER_TOPIC, @@ -32,6 +35,7 @@ from .common import ( TEST_DATA_SOFTENER, TEST_DATA_SOFTENER_RESET, TEST_DATA_SOFTENER_TOPIC, + config_entry_alert, config_entry_filter, config_entry_hub, config_entry_leak, @@ -57,6 +61,12 @@ def only_sensor_platform() -> Generator[None]: ("config_entry", "topic", "reset", "data"), [ (config_entry_hub(), TEST_DATA_HUB_TOPIC, TEST_DATA_HUB_RESET, TEST_DATA_HUB), + ( + config_entry_alert(), + TEST_DATA_ALERT_TOPIC, + TEST_DATA_ALERT_RESET, + TEST_DATA_ALERT, + ), ( config_entry_leak(), TEST_DATA_LEAK_TOPIC, @@ -96,6 +106,7 @@ def only_sensor_platform() -> Generator[None]: ], ids=[ "hub", + "alert", "leak", "softener", "filter", diff --git a/tests/components/dsmr/test_sensor.py b/tests/components/dsmr/test_sensor.py index b93dd8d18d2..c2c6d48b007 100644 --- a/tests/components/dsmr/test_sensor.py +++ b/tests/components/dsmr/test_sensor.py @@ -1521,7 +1521,7 @@ async def test_gas_meter_providing_energy_reading( ) -def test_all_obis_references_exists(): +def test_all_obis_references_exists() -> None: """Verify that all attributes exist by name in database.""" for sensor in SENSORS: assert hasattr(obis_references, sensor.obis_reference) diff --git a/tests/components/duckdns/test_init.py b/tests/components/duckdns/test_init.py index c06add7156a..313cc91aa18 100644 --- a/tests/components/duckdns/test_init.py +++ b/tests/components/duckdns/test_init.py @@ -8,7 +8,6 @@ import pytest from homeassistant.components import duckdns from homeassistant.components.duckdns import 
async_track_time_interval_backoff from homeassistant.core import HomeAssistant -from homeassistant.loader import bind_hass from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -21,8 +20,7 @@ _LOGGER = logging.getLogger(__name__) INTERVAL = duckdns.INTERVAL -@bind_hass -async def async_set_txt(hass, txt): +async def async_set_txt(hass: HomeAssistant, txt: str | None) -> None: """Set the txt record. Pass in None to remove it. This is a legacy helper method. Do not use it for new tests. diff --git a/tests/components/dynalite/common.py b/tests/components/dynalite/common.py index 91458b0aaff..640b6b3e24f 100644 --- a/tests/components/dynalite/common.py +++ b/tests/components/dynalite/common.py @@ -2,8 +2,11 @@ from unittest.mock import AsyncMock, Mock, call, patch +from dynalite_devices_lib.dynalitebase import DynaliteBaseDevice + from homeassistant.components import dynalite from homeassistant.const import ATTR_SERVICE +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -21,14 +24,14 @@ def create_mock_device(platform, spec): return device -async def get_entry_id_from_hass(hass): +async def get_entry_id_from_hass(hass: HomeAssistant) -> str: """Get the config entry id from hass.""" conf_entries = hass.config_entries.async_entries(dynalite.DOMAIN) assert len(conf_entries) == 1 return conf_entries[0].entry_id -async def create_entity_from_device(hass, device): +async def create_entity_from_device(hass: HomeAssistant, device: DynaliteBaseDevice): """Set up the component and platform and create a light based on the device provided.""" host = "1.2.3.4" entry = MockConfigEntry(domain=dynalite.DOMAIN, data={dynalite.CONF_HOST: host}) @@ -45,7 +48,7 @@ async def create_entity_from_device(hass, device): return mock_dyn_dev.mock_calls[1][2]["update_device_func"] -async def run_service_tests(hass, device, platform, services): +async def run_service_tests(hass: HomeAssistant, device, platform, services): """Run a series of service calls and check that the entity and device behave correctly.""" for cur_item in services: service = cur_item[ATTR_SERVICE] diff --git a/tests/components/dynalite/test_cover.py b/tests/components/dynalite/test_cover.py index c43d349d184..930318978fc 100644 --- a/tests/components/dynalite/test_cover.py +++ b/tests/components/dynalite/test_cover.py @@ -1,8 +1,10 @@ """Test Dynalite cover.""" +from collections.abc import Callable from unittest.mock import Mock from dynalite_devices_lib.cover import DynaliteTimeCoverWithTiltDevice +from dynalite_devices_lib.dynalitebase import DynaliteBaseDevice import pytest from homeassistant.components.cover import ( @@ -36,7 +38,7 @@ from tests.common import mock_restore_cache @pytest.fixture -def mock_device(): +def mock_device() -> Mock: """Mock a Dynalite device.""" mock_dev = create_mock_device("cover", DynaliteTimeCoverWithTiltDevice) mock_dev.device_class = CoverDeviceClass.BLIND.value @@ -54,7 +56,7 @@ def mock_device(): return mock_dev -async def test_cover_setup(hass: HomeAssistant, mock_device) -> None: +async def test_cover_setup(hass: HomeAssistant, mock_device: Mock) -> None: """Test a successful setup.""" await create_entity_from_device(hass, mock_device) entity_state = hass.states.get("cover.name") @@ -93,7 +95,7 @@ async def test_cover_setup(hass: HomeAssistant, mock_device) -> None: ) -async def test_cover_without_tilt(hass: HomeAssistant, mock_device) -> None: +async def test_cover_without_tilt(hass: HomeAssistant, mock_device: Mock) -> None: 
"""Test a cover with no tilt.""" mock_device.has_tilt = False await create_entity_from_device(hass, mock_device) @@ -106,8 +108,14 @@ async def test_cover_without_tilt(hass: HomeAssistant, mock_device) -> None: async def check_cover_position( - hass, update_func, device, closing, opening, closed, expected -): + hass: HomeAssistant, + update_func: Callable[[DynaliteBaseDevice | None], None], + device: Mock, + closing: bool, + opening: bool, + closed: bool, + expected: str, +) -> None: """Check that a given position behaves correctly.""" device.is_closing = closing device.is_opening = opening @@ -118,7 +126,7 @@ async def check_cover_position( assert entity_state.state == expected -async def test_cover_positions(hass: HomeAssistant, mock_device) -> None: +async def test_cover_positions(hass: HomeAssistant, mock_device: Mock) -> None: """Test that the state updates in the various positions.""" update_func = await create_entity_from_device(hass, mock_device) await check_cover_position( @@ -135,7 +143,7 @@ async def test_cover_positions(hass: HomeAssistant, mock_device) -> None: ) -async def test_cover_restore_state(hass: HomeAssistant, mock_device) -> None: +async def test_cover_restore_state(hass: HomeAssistant, mock_device: Mock) -> None: """Test restore from cache.""" mock_restore_cache( hass, @@ -147,7 +155,9 @@ async def test_cover_restore_state(hass: HomeAssistant, mock_device) -> None: assert entity_state.state == STATE_OPEN -async def test_cover_restore_state_bad_cache(hass: HomeAssistant, mock_device) -> None: +async def test_cover_restore_state_bad_cache( + hass: HomeAssistant, mock_device: Mock +) -> None: """Test restore from a cache without the attribute.""" mock_restore_cache( hass, diff --git a/tests/components/eafm/test_sensor.py b/tests/components/eafm/test_sensor.py index 986e1153cac..add604167b9 100644 --- a/tests/components/eafm/test_sensor.py +++ b/tests/components/eafm/test_sensor.py @@ -1,6 +1,9 @@ """Tests for polling measures.""" +from collections.abc import Callable, Coroutine import datetime +from typing import Any +from unittest.mock import AsyncMock import aiohttp import pytest @@ -23,7 +26,9 @@ CONNECTION_EXCEPTIONS = [ ] -async def async_setup_test_fixture(hass, mock_get_station, initial_value): +async def async_setup_test_fixture( + hass: HomeAssistant, mock_get_station: AsyncMock, initial_value: dict[str, Any] +) -> tuple[MockConfigEntry, Callable[[Any], Coroutine[Any, Any, None]]]: """Create a dummy config entry for testing polling.""" mock_get_station.return_value = initial_value diff --git a/tests/components/ecovacs/test_config_flow.py b/tests/components/ecovacs/test_config_flow.py index 0a161f88baa..5bf1144db0b 100644 --- a/tests/components/ecovacs/test_config_flow.py +++ b/tests/components/ecovacs/test_config_flow.py @@ -11,28 +11,23 @@ from deebot_client.mqtt_client import create_mqtt_config import pytest from homeassistant.components.ecovacs.const import ( - CONF_CONTINENT, CONF_OVERRIDE_MQTT_URL, CONF_OVERRIDE_REST_URL, CONF_VERIFY_MQTT_CERTIFICATE, DOMAIN, InstanceMode, ) -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import CONF_COUNTRY, CONF_MODE, CONF_USERNAME -from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_MODE, CONF_USERNAME +from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import issue_registry as ir 
from .const import ( - IMPORT_DATA, VALID_ENTRY_DATA_CLOUD, VALID_ENTRY_DATA_SELF_HOSTED, VALID_ENTRY_DATA_SELF_HOSTED_WITH_VALIDATE_CERT, ) -from tests.common import MockConfigEntry - _USER_STEP_SELF_HOSTED = {CONF_MODE: InstanceMode.SELF_HOSTED} _TEST_FN_AUTH_ARG = "user_input_auth" @@ -303,116 +298,3 @@ async def test_user_flow_self_hosted_error( mock_setup_entry.assert_called() mock_authenticator_authenticate.assert_called() mock_mqtt_client.verify_config.assert_called() - - -async def test_import_flow( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_setup_entry: AsyncMock, - mock_authenticator_authenticate: AsyncMock, - mock_mqtt_client: Mock, -) -> None: - """Test importing yaml config.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=IMPORT_DATA.copy(), - ) - mock_authenticator_authenticate.assert_called() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == VALID_ENTRY_DATA_CLOUD[CONF_USERNAME] - assert result["data"] == VALID_ENTRY_DATA_CLOUD - assert (HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}") in issue_registry.issues - mock_setup_entry.assert_called() - mock_mqtt_client.verify_config.assert_called() - - -async def test_import_flow_already_configured( - hass: HomeAssistant, issue_registry: ir.IssueRegistry -) -> None: - """Test importing yaml config where entry already configured.""" - entry = MockConfigEntry(domain=DOMAIN, data=VALID_ENTRY_DATA_CLOUD) - entry.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=IMPORT_DATA.copy(), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - assert (HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}") in issue_registry.issues - - -@pytest.mark.parametrize("show_advanced_options", [True, False]) -@pytest.mark.parametrize( - ("side_effect", "reason"), - [ - (ClientError, "cannot_connect"), - (InvalidAuthenticationError, "invalid_auth"), - (Exception, "unknown"), - ], -) -async def test_import_flow_error( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - mock_authenticator_authenticate: AsyncMock, - mock_mqtt_client: Mock, - side_effect: Exception, - reason: str, - show_advanced_options: bool, -) -> None: - """Test handling invalid connection.""" - mock_authenticator_authenticate.side_effect = side_effect - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_IMPORT, - "show_advanced_options": show_advanced_options, - }, - data=IMPORT_DATA.copy(), - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - assert ( - DOMAIN, - f"deprecated_yaml_import_issue_{reason}", - ) in issue_registry.issues - mock_authenticator_authenticate.assert_called() - - -@pytest.mark.parametrize("show_advanced_options", [True, False]) -@pytest.mark.parametrize( - ("reason", "user_input"), - [ - ("invalid_country_length", IMPORT_DATA | {CONF_COUNTRY: "too_long"}), - ("invalid_country_length", IMPORT_DATA | {CONF_COUNTRY: "a"}), # too short - ("invalid_continent_length", IMPORT_DATA | {CONF_CONTINENT: "too_long"}), - ("invalid_continent_length", IMPORT_DATA | {CONF_CONTINENT: "a"}), # too short - ("continent_not_match", IMPORT_DATA | {CONF_CONTINENT: "AA"}), - ], -) -async def test_import_flow_invalid_data( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, - reason: str, - user_input: dict[str, Any], - show_advanced_options: bool, -) 
-> None: - """Test handling invalid connection.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_IMPORT, - "show_advanced_options": show_advanced_options, - }, - data=user_input, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == reason - assert ( - DOMAIN, - f"deprecated_yaml_import_issue_{reason}", - ) in issue_registry.issues diff --git a/tests/components/ecovacs/test_init.py b/tests/components/ecovacs/test_init.py index ac4d5661a83..2185ae4c9eb 100644 --- a/tests/components/ecovacs/test_init.py +++ b/tests/components/ecovacs/test_init.py @@ -1,7 +1,6 @@ """Test init of ecovacs.""" -from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import Mock, patch from deebot_client.exceptions import DeebotError, InvalidAuthenticationError import pytest @@ -12,9 +11,6 @@ from homeassistant.components.ecovacs.controller import EcovacsController from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr -from homeassistant.setup import async_setup_component - -from .const import IMPORT_DATA from tests.common import MockConfigEntry @@ -88,32 +84,6 @@ async def test_invalid_auth( assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR -@pytest.mark.parametrize( - ("config", "config_entries_expected"), - [ - ({}, 0), - ({DOMAIN: IMPORT_DATA.copy()}, 1), - ], - ids=["no_config", "import_config"], -) -async def test_async_setup_import( - hass: HomeAssistant, - config: dict[str, Any], - config_entries_expected: int, - mock_setup_entry: AsyncMock, - mock_authenticator_authenticate: AsyncMock, - mock_mqtt_client: Mock, -) -> None: - """Test async_setup config import.""" - assert len(hass.config_entries.async_entries(DOMAIN)) == 0 - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - assert len(hass.config_entries.async_entries(DOMAIN)) == config_entries_expected - assert mock_setup_entry.call_count == config_entries_expected - assert mock_authenticator_authenticate.call_count == config_entries_expected - assert mock_mqtt_client.verify_config.call_count == config_entries_expected - - async def test_devices_in_dr( device_registry: dr.DeviceRegistry, controller: EcovacsController, diff --git a/tests/components/efergy/test_config_flow.py b/tests/components/efergy/test_config_flow.py index 9a66c42bc9a..8b77bbdc7ab 100644 --- a/tests/components/efergy/test_config_flow.py +++ b/tests/components/efergy/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import patch from pyefergy import exceptions from homeassistant.components.efergy.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -76,20 +76,11 @@ async def test_flow_user_unknown(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test reauth step.""" entry = create_entry(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + with _patch_efergy(), _patch_setup(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": 
entry.unique_id, - }, - data=CONF_DATA, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - new_conf = {CONF_API_KEY: "1234567890"} result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/electric_kiwi/test_config_flow.py b/tests/components/electric_kiwi/test_config_flow.py index bf248aafb13..d23e70422dd 100644 --- a/tests/components/electric_kiwi/test_config_flow.py +++ b/tests/components/electric_kiwi/test_config_flow.py @@ -18,7 +18,6 @@ from homeassistant.components.electric_kiwi.const import ( OAUTH2_TOKEN, SCOPE_VALUES, ) -from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -160,16 +159,11 @@ async def test_reauthentication( setup_credentials: None, ) -> None: """Test Electric Kiwi reauthentication.""" + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": DOMAIN} - ) - - flows = hass.config_entries.flow.async_progress() - assert len(flows) == 1 - assert "flow_id" in flows[0] - - result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) + result = await hass.config_entries.flow.async_configure(result["flow_id"], {}) state = config_entry_oauth2_flow._encode_jwt( hass, diff --git a/tests/components/elevenlabs/conftest.py b/tests/components/elevenlabs/conftest.py index 13eb022243f..c4d9a87b5ad 100644 --- a/tests/components/elevenlabs/conftest.py +++ b/tests/components/elevenlabs/conftest.py @@ -16,7 +16,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" with patch( "homeassistant.components.elevenlabs.async_setup_entry", return_value=True @@ -25,7 +25,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_async_client() -> Generator[AsyncMock, None, None]: +def mock_async_client() -> Generator[AsyncMock]: """Override async ElevenLabs client.""" client_mock = AsyncMock() client_mock.voices.get_all.return_value = GetVoicesResponse(voices=MOCK_VOICES) @@ -37,7 +37,7 @@ def mock_async_client() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_async_client_fail() -> Generator[AsyncMock, None, None]: +def mock_async_client_fail() -> Generator[AsyncMock]: """Override async ElevenLabs client.""" with patch( "homeassistant.components.elevenlabs.config_flow.AsyncElevenLabs", diff --git a/tests/components/elevenlabs/test_tts.py b/tests/components/elevenlabs/test_tts.py index 7fa289f24ed..8b14ab26487 100644 --- a/tests/components/elevenlabs/test_tts.py +++ b/tests/components/elevenlabs/test_tts.py @@ -3,8 +3,9 @@ from __future__ import annotations from http import HTTPStatus +from pathlib import Path from typing import Any -from unittest.mock import AsyncMock, patch +from unittest.mock import AsyncMock, MagicMock, patch from elevenlabs.core import ApiError from elevenlabs.types import GetVoicesResponse @@ -29,14 +30,13 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock): +def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> 
None: """Mock writing tags.""" @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir): +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture diff --git a/tests/components/elmax/test_config_flow.py b/tests/components/elmax/test_config_flow.py index 85e14dd0a3f..7a4d9755fa5 100644 --- a/tests/components/elmax/test_config_flow.py +++ b/tests/components/elmax/test_config_flow.py @@ -21,7 +21,6 @@ from homeassistant.components.elmax.const import ( CONF_ELMAX_USERNAME, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -544,20 +543,7 @@ async def test_show_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -577,24 +563,11 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.elmax.async_setup_entry", return_value=True, ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -624,24 +597,11 @@ async def test_reauth_panel_disappeared(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.list_control_panels", return_value=[], ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -670,24 +630,11 @@ async def test_reauth_invalid_pin(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.get_panel_status", side_effect=ElmaxBadPinError(), ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { @@ -716,24 +663,11 @@ async def test_reauth_bad_login(hass: HomeAssistant) -> None: entry.add_to_hass(hass) # 
Trigger reauth + reauth_result = await entry.start_reauth_flow(hass) with patch( "elmax_api.http.Elmax.login", side_effect=ElmaxBadLoginError(), ): - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data={ - CONF_ELMAX_PANEL_ID: MOCK_PANEL_ID, - CONF_ELMAX_PANEL_PIN: MOCK_PANEL_PIN, - CONF_ELMAX_USERNAME: MOCK_USERNAME, - CONF_ELMAX_PASSWORD: MOCK_PASSWORD, - }, - ) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], { diff --git a/tests/components/energy/test_sensor.py b/tests/components/energy/test_sensor.py index 0439ac2c028..a27451b853d 100644 --- a/tests/components/energy/test_sensor.py +++ b/tests/components/energy/test_sensor.py @@ -1,5 +1,6 @@ """Test the Energy sensors.""" +from collections.abc import Callable, Coroutine import copy from datetime import timedelta from typing import Any @@ -37,10 +38,12 @@ TEST_TIME_ADVANCE_INTERVAL = timedelta(milliseconds=10) @pytest.fixture -async def setup_integration(recorder_mock: Recorder): +async def setup_integration( + recorder_mock: Recorder, +) -> Callable[[HomeAssistant], Coroutine[Any, Any, None]]: """Set up the integration.""" - async def setup_integration(hass): + async def setup_integration(hass: HomeAssistant) -> None: assert await async_setup_component(hass, "energy", {}) await hass.async_block_till_done() diff --git a/tests/components/enphase_envoy/test_config_flow.py b/tests/components/enphase_envoy/test_config_flow.py index c2cc02fcc7c..f61a0054ed9 100644 --- a/tests/components/enphase_envoy/test_config_flow.py +++ b/tests/components/enphase_envoy/test_config_flow.py @@ -14,7 +14,6 @@ from homeassistant.components.enphase_envoy.const import ( OPTION_DIAGNOSTICS_INCLUDE_FIXTURES_DEFAULT_VALUE, ) from homeassistant.config_entries import ( - SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER, SOURCE_ZEROCONF, @@ -636,14 +635,7 @@ async def test_reauth( ) -> None: """Test we reauth auth.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { diff --git a/tests/components/environment_canada/test_config_flow.py b/tests/components/environment_canada/test_config_flow.py index f2c35ab4295..d61966e8da1 100644 --- a/tests/components/environment_canada/test_config_flow.py +++ b/tests/components/environment_canada/test_config_flow.py @@ -1,7 +1,7 @@ """Test the Environment Canada (EC) config flow.""" from unittest.mock import AsyncMock, MagicMock, Mock, patch -import xml.etree.ElementTree as et +import xml.etree.ElementTree as ET import aiohttp import pytest @@ -94,7 +94,7 @@ async def test_create_same_entry_twice(hass: HomeAssistant) -> None: (aiohttp.ClientResponseError(Mock(), (), status=404), "bad_station_id"), (aiohttp.ClientResponseError(Mock(), (), status=400), "error_response"), (aiohttp.ClientConnectionError, "cannot_connect"), - (et.ParseError, "bad_station_id"), + (ET.ParseError, "bad_station_id"), (ValueError, "unknown"), ], ) diff --git a/tests/components/esphome/conftest.py b/tests/components/esphome/conftest.py index 75be231558f..b3966875a31 100644 --- a/tests/components/esphome/conftest.py +++ b/tests/components/esphome/conftest.py @@ -205,6 +205,7 @@ class 
MockESPHomeDevice: self.home_assistant_state_subscription_callback: Callable[ [str, str | None], None ] + self.home_assistant_state_request_callback: Callable[[str, str | None], None] self.voice_assistant_handle_start_callback: Callable[ [str, int, VoiceAssistantAudioSettings, str | None], Coroutine[Any, Any, int | None], @@ -268,9 +269,11 @@ class MockESPHomeDevice: def set_home_assistant_state_subscription_callback( self, on_state_sub: Callable[[str, str | None], None], + on_state_request: Callable[[str, str | None], None], ) -> None: """Set the state call callback.""" self.home_assistant_state_subscription_callback = on_state_sub + self.home_assistant_state_request_callback = on_state_request def mock_home_assistant_state_subscription( self, entity_id: str, attribute: str | None @@ -278,6 +281,12 @@ class MockESPHomeDevice: """Mock a state subscription.""" self.home_assistant_state_subscription_callback(entity_id, attribute) + def mock_home_assistant_state_request( + self, entity_id: str, attribute: str | None + ) -> None: + """Mock a state request.""" + self.home_assistant_state_request_callback(entity_id, attribute) + def set_subscribe_voice_assistant_callbacks( self, handle_start: Callable[ @@ -378,9 +387,12 @@ async def _mock_generic_device_entry( def _subscribe_home_assistant_states( on_state_sub: Callable[[str, str | None], None], + on_state_request: Callable[[str, str | None], None], ) -> None: """Subscribe to home assistant states.""" - mock_device.set_home_assistant_state_subscription_callback(on_state_sub) + mock_device.set_home_assistant_state_subscription_callback( + on_state_sub, on_state_request + ) def _subscribe_voice_assistant( *, @@ -421,7 +433,7 @@ async def _mock_generic_device_entry( class MockReconnectLogic(BaseMockReconnectLogic): """Mock ReconnectLogic.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init the mock.""" super().__init__(*args, **kwargs) mock_device.set_on_disconnect(kwargs["on_disconnect"]) diff --git a/tests/components/esphome/test_config_flow.py b/tests/components/esphome/test_config_flow.py index 68af6665380..2f91921e7f2 100644 --- a/tests/components/esphome/test_config_flow.py +++ b/tests/components/esphome/test_config_flow.py @@ -798,14 +798,7 @@ async def test_reauth_initiation(hass: HomeAssistant, mock_client) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -821,14 +814,7 @@ async def test_reauth_confirm_valid( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") result = await hass.config_entries.flow.async_configure( @@ -875,14 +861,7 @@ async def test_reauth_fixed_via_dashboard( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - 
"unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -896,7 +875,7 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( hass: HomeAssistant, mock_client, mock_dashboard: dict[str, Any], - mock_config_entry, + mock_config_entry: MockConfigEntry, mock_setup_entry: None, ) -> None: """Test reauth fixed automatically via dashboard with password removed.""" @@ -918,14 +897,7 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "unique_id": mock_config_entry.unique_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -938,21 +910,14 @@ async def test_reauth_fixed_via_dashboard_add_encryption_remove_password( async def test_reauth_fixed_via_remove_password( hass: HomeAssistant, mock_client, - mock_config_entry, + mock_config_entry: MockConfigEntry, mock_dashboard: dict[str, Any], mock_setup_entry: None, ) -> None: """Test reauth fixed automatically by seeing password removed.""" mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "unique_id": mock_config_entry.unique_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT, result assert result["reason"] == "reauth_successful" @@ -981,14 +946,7 @@ async def test_reauth_fixed_via_dashboard_at_confirm( mock_client.device_info.return_value = DeviceInfo(uses_password=False, name="test") - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM, result assert result["step_id"] == "reauth_confirm" @@ -1027,14 +985,7 @@ async def test_reauth_confirm_invalid( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) mock_client.device_info.side_effect = InvalidEncryptionKeyAPIError result = await hass.config_entries.flow.async_configure( @@ -1070,14 +1021,7 @@ async def test_reauth_confirm_invalid_with_unique_id( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) mock_client.device_info.side_effect = InvalidEncryptionKeyAPIError result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/esphome/test_dashboard.py b/tests/components/esphome/test_dashboard.py index da805eb2eee..1641804e458 100644 --- 
a/tests/components/esphome/test_dashboard.py +++ b/tests/components/esphome/test_dashboard.py @@ -6,7 +6,7 @@ from unittest.mock import patch from aioesphomeapi import DeviceInfo, InvalidAuthAPIError from homeassistant.components.esphome import CONF_NOISE_PSK, coordinator, dashboard -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -150,7 +150,7 @@ async def test_new_info_reload_config_entries( async def test_new_dashboard_fix_reauth( - hass: HomeAssistant, mock_client, mock_config_entry, mock_dashboard + hass: HomeAssistant, mock_client, mock_config_entry: MockConfigEntry, mock_dashboard ) -> None: """Test config entries waiting for reauth are triggered.""" mock_client.device_info.side_effect = ( @@ -162,14 +162,7 @@ async def test_new_dashboard_fix_reauth( "homeassistant.components.esphome.coordinator.ESPHomeDashboardAPI.get_encryption_key", return_value=VALID_NOISE_PSK, ) as mock_get_encryption_key: - result = await hass.config_entries.flow.async_init( - "esphome", - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "unique_id": mock_config_entry.unique_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert len(mock_get_encryption_key.mock_calls) == 0 diff --git a/tests/components/esphome/test_manager.py b/tests/components/esphome/test_manager.py index 9d2a906466e..a14c83bf265 100644 --- a/tests/components/esphome/test_manager.py +++ b/tests/components/esphome/test_manager.py @@ -721,6 +721,34 @@ async def test_state_subscription( assert mock_client.send_home_assistant_state.mock_calls == [] +async def test_state_request( + mock_client: APIClient, + hass: HomeAssistant, + mock_esphome_device: Callable[ + [APIClient, list[EntityInfo], list[UserService], list[EntityState]], + Awaitable[MockESPHomeDevice], + ], +) -> None: + """Test ESPHome requests state change.""" + device: MockESPHomeDevice = await mock_esphome_device( + mock_client=mock_client, + entity_info=[], + user_service=[], + states=[], + ) + await hass.async_block_till_done() + hass.states.async_set("binary_sensor.test", "on", {"bool": True, "float": 3.0}) + device.mock_home_assistant_state_request("binary_sensor.test", None) + await hass.async_block_till_done() + assert mock_client.send_home_assistant_state.mock_calls == [ + call("binary_sensor.test", None, "on") + ] + mock_client.send_home_assistant_state.reset_mock() + hass.states.async_set("binary_sensor.test", "off", {"bool": False, "float": 5.0}) + await hass.async_block_till_done() + assert mock_client.send_home_assistant_state.mock_calls == [] + + async def test_debug_logging( mock_client: APIClient, hass: HomeAssistant, diff --git a/tests/components/evohome/__init__.py b/tests/components/evohome/__init__.py new file mode 100644 index 00000000000..588e0f61746 --- /dev/null +++ b/tests/components/evohome/__init__.py @@ -0,0 +1 @@ +"""The tests for the evohome integration.""" diff --git a/tests/components/evohome/conftest.py b/tests/components/evohome/conftest.py new file mode 100644 index 00000000000..260330896b7 --- /dev/null +++ b/tests/components/evohome/conftest.py @@ -0,0 +1,111 @@ +"""Fixtures and helpers for the evohome tests.""" + +from __future__ import annotations + +from datetime import datetime, timedelta +from typing import Any, 
Final +from unittest.mock import MagicMock, patch + +from aiohttp import ClientSession +from evohomeasync2 import EvohomeClient +from evohomeasync2.broker import Broker +import pytest + +from homeassistant.components.evohome import CONF_PASSWORD, CONF_USERNAME, DOMAIN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonArrayType, JsonObjectType + +from .const import ACCESS_TOKEN, REFRESH_TOKEN + +from tests.common import load_json_array_fixture, load_json_object_fixture + +TEST_CONFIG: Final = { + CONF_USERNAME: "username", + CONF_PASSWORD: "password", +} + + +def user_account_config_fixture() -> JsonObjectType: + """Load JSON for the config of a user's account.""" + return load_json_object_fixture("user_account.json", DOMAIN) + + +def user_locations_config_fixture() -> JsonArrayType: + """Load JSON for the config of a user's installation (a list of locations).""" + return load_json_array_fixture("user_locations.json", DOMAIN) + + +def location_status_fixture(loc_id: str) -> JsonObjectType: + """Load JSON for the status of a specific location.""" + return load_json_object_fixture(f"status_{loc_id}.json", DOMAIN) + + +def dhw_schedule_fixture() -> JsonObjectType: + """Load JSON for the schedule of a domesticHotWater zone.""" + return load_json_object_fixture("schedule_dhw.json", DOMAIN) + + +def zone_schedule_fixture() -> JsonObjectType: + """Load JSON for the schedule of a temperatureZone zone.""" + return load_json_object_fixture("schedule_zone.json", DOMAIN) + + +async def mock_get( + self: Broker, url: str, **kwargs: Any +) -> JsonArrayType | JsonObjectType: + """Return the JSON for a HTTP get of a given URL.""" + + # a proxy for the behaviour of the real web API + if self.refresh_token is None: + self.refresh_token = f"new_{REFRESH_TOKEN}" + + if self.access_token_expires is None or self.access_token_expires < datetime.now(): + self.access_token = f"new_{ACCESS_TOKEN}" + self.access_token_expires = datetime.now() + timedelta(minutes=30) + + # assume a valid GET, and return the JSON for that web API + if url == "userAccount": # userAccount + return user_account_config_fixture() + + if url.startswith("location"): + if "installationInfo" in url: # location/installationInfo?userId={id} + return user_locations_config_fixture() + if "location" in url: # location/{id}/status + return location_status_fixture("2738909") + + elif "schedule" in url: + if url.startswith("domesticHotWater"): # domesticHotWater/{id}/schedule + return dhw_schedule_fixture() + if url.startswith("temperatureZone"): # temperatureZone/{id}/schedule + return zone_schedule_fixture() + + pytest.xfail(f"Unexpected URL: {url}") + + +@patch("evohomeasync2.broker.Broker.get", mock_get) +async def setup_evohome(hass: HomeAssistant, test_config: dict[str, str]) -> MagicMock: + """Set up the evohome integration and return its client. + + The class is mocked here to check the client was instantiated with the correct args. 
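+    Broker.get is patched with mock_get above, so no real web API calls are made.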
+ """ + + with ( + patch("homeassistant.components.evohome.evo.EvohomeClient") as mock_client, + patch("homeassistant.components.evohome.ev1.EvohomeClient", return_value=None), + ): + mock_client.side_effect = EvohomeClient + + assert await async_setup_component(hass, DOMAIN, {DOMAIN: test_config}) + await hass.async_block_till_done() + + mock_client.assert_called_once() + + assert mock_client.call_args.args[0] == test_config[CONF_USERNAME] + assert mock_client.call_args.args[1] == test_config[CONF_PASSWORD] + + assert isinstance(mock_client.call_args.kwargs["session"], ClientSession) + + assert mock_client.account_info is not None + + return mock_client diff --git a/tests/components/evohome/const.py b/tests/components/evohome/const.py new file mode 100644 index 00000000000..0b298db533a --- /dev/null +++ b/tests/components/evohome/const.py @@ -0,0 +1,10 @@ +"""Constants for the evohome tests.""" + +from __future__ import annotations + +from typing import Final + +ACCESS_TOKEN: Final = "at_1dc7z657UKzbhKA..." +REFRESH_TOKEN: Final = "rf_jg68ZCKYdxEI3fF..." +SESSION_ID: Final = "F7181186..." +USERNAME: Final = "test_user@gmail.com" diff --git a/tests/components/evohome/fixtures/schedule_dhw.json b/tests/components/evohome/fixtures/schedule_dhw.json new file mode 100644 index 00000000000..da9a225fb82 --- /dev/null +++ b/tests/components/evohome/fixtures/schedule_dhw.json @@ -0,0 +1,81 @@ +{ + "dailySchedules": [ + { + "dayOfWeek": "Monday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Tuesday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Wednesday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Thursday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Friday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "08:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "22:30:00" } + ] + }, + { + "dayOfWeek": "Saturday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "09:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "23:00:00" 
} + ] + }, + { + "dayOfWeek": "Sunday", + "switchpoints": [ + { "dhwState": "On", "timeOfDay": "06:30:00" }, + { "dhwState": "Off", "timeOfDay": "09:30:00" }, + { "dhwState": "On", "timeOfDay": "12:00:00" }, + { "dhwState": "Off", "timeOfDay": "13:00:00" }, + { "dhwState": "On", "timeOfDay": "16:30:00" }, + { "dhwState": "Off", "timeOfDay": "23:00:00" } + ] + } + ] +} diff --git a/tests/components/evohome/fixtures/schedule_zone.json b/tests/components/evohome/fixtures/schedule_zone.json new file mode 100644 index 00000000000..5030d92ff3d --- /dev/null +++ b/tests/components/evohome/fixtures/schedule_zone.json @@ -0,0 +1,67 @@ +{ + "dailySchedules": [ + { + "dayOfWeek": "Monday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Tuesday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Wednesday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Thursday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Friday", + "switchpoints": [ + { "heatSetpoint": 18.1, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:00:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Saturday", + "switchpoints": [ + { "heatSetpoint": 18.5, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:30:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + }, + { + "dayOfWeek": "Sunday", + "switchpoints": [ + { "heatSetpoint": 18.5, "timeOfDay": "07:00:00" }, + { "heatSetpoint": 16.0, "timeOfDay": "08:30:00" }, + { "heatSetpoint": 18.6, "timeOfDay": "22:10:00" }, + { "heatSetpoint": 15.9, "timeOfDay": "23:00:00" } + ] + } + ] +} diff --git a/tests/components/evohome/fixtures/status_2738909.json b/tests/components/evohome/fixtures/status_2738909.json new file mode 100644 index 00000000000..6d555ba4e3e --- /dev/null +++ b/tests/components/evohome/fixtures/status_2738909.json @@ -0,0 +1,125 @@ +{ + "locationId": "2738909", + "gateways": [ + { + "gatewayId": "2499896", + "temperatureControlSystems": [ + { + "systemId": "3432522", + "zones": [ + { + "zoneId": "3432521", + "name": "Dead Zone", + "temperatureStatus": { "isAvailable": false }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "activeFaults": [] + }, + { + "zoneId": "3432576", + "name": "Main Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "PermanentOverride" + }, + "activeFaults": [ + { + "faultType": "TempZoneActuatorCommunicationLost", + "since": "2022-03-02T15:56:01" + } + ] + }, + { + "zoneId": 
"3432577", + "name": "Front Room", + "temperatureStatus": { "temperature": 19.0, "isAvailable": true }, + "setpointStatus": { + "targetHeatTemperature": 21.0, + "setpointMode": "TemporaryOverride", + "until": "2022-03-07T19:00:00Z" + }, + "activeFaults": [ + { + "faultType": "TempZoneActuatorLowBattery", + "since": "2022-03-02T04:50:20" + } + ] + }, + { + "zoneId": "3432578", + "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "name": "Kitchen" + }, + { + "zoneId": "3432579", + "temperatureStatus": { "temperature": 20.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.0, + "setpointMode": "FollowSchedule" + }, + "name": "Bathroom Dn" + }, + { + "zoneId": "3432580", + "temperatureStatus": { "temperature": 21.0, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.0, + "setpointMode": "FollowSchedule" + }, + "name": "Main Bedroom" + }, + { + "zoneId": "3449703", + "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 17.0, + "setpointMode": "FollowSchedule" + }, + "name": "Kids Room" + }, + { + "zoneId": "3449740", + "temperatureStatus": { "temperature": 21.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 16.5, + "setpointMode": "FollowSchedule" + }, + "name": "" + }, + { + "zoneId": "3450733", + "temperatureStatus": { "temperature": 19.5, "isAvailable": true }, + "activeFaults": [], + "setpointStatus": { + "targetHeatTemperature": 14.0, + "setpointMode": "PermanentOverride" + }, + "name": "Spare Room" + } + ], + "dhw": { + "dhwId": "3933910", + "temperatureStatus": { "temperature": 23.0, "isAvailable": true }, + "stateStatus": { "state": "Off", "mode": "PermanentOverride" }, + "activeFaults": [] + }, + "activeFaults": [], + "systemModeStatus": { "mode": "AutoWithEco", "isPermanent": true } + } + ], + "activeFaults": [] + } + ] +} diff --git a/tests/components/evohome/fixtures/user_account.json b/tests/components/evohome/fixtures/user_account.json new file mode 100644 index 00000000000..99a96a7961e --- /dev/null +++ b/tests/components/evohome/fixtures/user_account.json @@ -0,0 +1,11 @@ +{ + "userId": "2263181", + "username": "user_2263181@gmail.com", + "firstname": "John", + "lastname": "Smith", + "streetAddress": "1 Main Street", + "city": "London", + "postcode": "E1 1AA", + "country": "UnitedKingdom", + "language": "enGB" +} diff --git a/tests/components/evohome/fixtures/user_locations.json b/tests/components/evohome/fixtures/user_locations.json new file mode 100644 index 00000000000..cf59aa9ae8a --- /dev/null +++ b/tests/components/evohome/fixtures/user_locations.json @@ -0,0 +1,346 @@ +[ + { + "locationInfo": { + "locationId": "2738909", + "name": "My Home", + "streetAddress": "1 Main Street", + "city": "London", + "country": "UnitedKingdom", + "postcode": "E1 1AA", + "locationType": "Residential", + "useDaylightSaveSwitching": true, + "timeZone": { + "timeZoneId": "GMTStandardTime", + "displayName": "(UTC+00:00) Dublin, Edinburgh, Lisbon, London", + "offsetMinutes": 0, + "currentOffsetMinutes": 60, + "supportsDaylightSaving": true + }, + "locationOwner": { + "userId": "2263181", + "username": "user_2263181@gmail.com", + "firstname": "John", + "lastname": "Smith" + } + }, + "gateways": [ + { + "gatewayInfo": { + "gatewayId": 
"2499896", + "mac": "00D02DEE0000", + "crc": "1234", + "isWiFi": false + }, + "temperatureControlSystems": [ + { + "systemId": "3432522", + "modelType": "EvoTouch", + "zones": [ + { + "zoneId": "3432521", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Dead Zone", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432576", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432577", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Front Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432578", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Kitchen", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432579", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Bathroom Dn", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3432580", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": 
"00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Main Bedroom", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3449703", + "modelType": "HeatingZone", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Kids Room", + "zoneType": "RadiatorZone" + }, + { + "zoneId": "3449740", + "modelType": "Unknown", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "", + "zoneType": "Unknown" + }, + { + "zoneId": "3450733", + "modelType": "xx", + "setpointCapabilities": { + "maxHeatSetpoint": 35.0, + "minHeatSetpoint": 5.0, + "valueResolution": 0.5, + "canControlHeat": true, + "canControlCool": false, + "allowedSetpointModes": [ + "PermanentOverride", + "FollowSchedule", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilities": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00", + "setpointValueResolution": 0.5 + }, + "name": "Spare Room", + "zoneType": "xx" + } + ], + "dhw": { + "dhwId": "3933910", + "dhwStateCapabilitiesResponse": { + "allowedStates": ["On", "Off"], + "allowedModes": [ + "FollowSchedule", + "PermanentOverride", + "TemporaryOverride" + ], + "maxDuration": "1.00:00:00", + "timingResolution": "00:10:00" + }, + "scheduleCapabilitiesResponse": { + "maxSwitchpointsPerDay": 6, + "minSwitchpointsPerDay": 1, + "timingResolution": "00:10:00" + } + }, + "allowedSystemModes": [ + { + "systemMode": "HeatingOff", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "Auto", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithReset", + "canBePermanent": true, + "canBeTemporary": false + }, + { + "systemMode": "AutoWithEco", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "1.00:00:00", + "timingResolution": "01:00:00", + "timingMode": "Duration" + }, + { + "systemMode": "Away", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "DayOff", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + }, + { + "systemMode": "Custom", + "canBePermanent": true, + "canBeTemporary": true, + "maxDuration": "99.00:00:00", + "timingResolution": "1.00:00:00", + "timingMode": "Period" + } + ] + } + ] + } + ] + } +] diff --git a/tests/components/evohome/test_storage.py b/tests/components/evohome/test_storage.py new 
file mode 100644
index 00000000000..e87b847a9ff
--- /dev/null
+++ b/tests/components/evohome/test_storage.py
@@ -0,0 +1,208 @@
+"""The tests for evohome storage load & save."""
+
+from __future__ import annotations
+
+from datetime import datetime, timedelta
+from typing import Any, Final, NotRequired, TypedDict
+
+import pytest
+
+from homeassistant.components.evohome import (
+    CONF_PASSWORD,
+    CONF_USERNAME,
+    DOMAIN,
+    STORAGE_KEY,
+    STORAGE_VER,
+    dt_aware_to_naive,
+)
+from homeassistant.core import HomeAssistant
+import homeassistant.util.dt as dt_util
+
+from .conftest import setup_evohome
+from .const import ACCESS_TOKEN, REFRESH_TOKEN, SESSION_ID, USERNAME
+
+
+class _SessionDataT(TypedDict):
+    sessionId: str
+
+
+class _TokenStoreT(TypedDict):
+    username: str
+    refresh_token: str
+    access_token: str
+    access_token_expires: str  # 2024-07-27T23:57:30+01:00
+    user_data: NotRequired[_SessionDataT]
+
+
+class _EmptyStoreT(TypedDict):
+    pass
+
+
+SZ_USERNAME: Final = "username"
+SZ_REFRESH_TOKEN: Final = "refresh_token"
+SZ_ACCESS_TOKEN: Final = "access_token"
+SZ_ACCESS_TOKEN_EXPIRES: Final = "access_token_expires"
+SZ_USER_DATA: Final = "user_data"
+
+
+def dt_pair(dt_dtm: datetime) -> tuple[datetime, str]:
+    """Return a datetime without milliseconds and its string representation."""
+    dt_str = dt_dtm.isoformat(timespec="seconds")  # e.g. 2024-07-28T00:57:29+01:00
+    return dt_util.parse_datetime(dt_str, raise_on_error=True), dt_str
+
+
+ACCESS_TOKEN_EXP_DTM, ACCESS_TOKEN_EXP_STR = dt_pair(dt_util.now() + timedelta(hours=1))
+
+USERNAME_DIFF: Final = f"not_{USERNAME}"
+USERNAME_SAME: Final = USERNAME
+
+TEST_CONFIG: Final = {
+    CONF_USERNAME: USERNAME_SAME,
+    CONF_PASSWORD: "password",
+}
+
+TEST_DATA: Final[dict[str, _TokenStoreT]] = {
+    "sans_session_id": {
+        SZ_USERNAME: USERNAME_SAME,
+        SZ_REFRESH_TOKEN: REFRESH_TOKEN,
+        SZ_ACCESS_TOKEN: ACCESS_TOKEN,
+        SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR,
+    },
+    "with_session_id": {
+        SZ_USERNAME: USERNAME_SAME,
+        SZ_REFRESH_TOKEN: REFRESH_TOKEN,
+        SZ_ACCESS_TOKEN: ACCESS_TOKEN,
+        SZ_ACCESS_TOKEN_EXPIRES: ACCESS_TOKEN_EXP_STR,
+        SZ_USER_DATA: {"sessionId": SESSION_ID},
+    },
+}
+
+TEST_DATA_NULL: Final[dict[str, _EmptyStoreT | None]] = {
+    "store_is_absent": None,
+    "store_was_reset": {},
+}
+
+DOMAIN_STORAGE_BASE: Final = {
+    "version": STORAGE_VER,
+    "minor_version": 1,
+    "key": STORAGE_KEY,
+}
+
+
+@pytest.mark.parametrize("idx", TEST_DATA_NULL)
+async def test_auth_tokens_null(
+    hass: HomeAssistant,
+    hass_storage: dict[str, Any],
+    idx: str,
+) -> None:
+    """Test loading/saving authentication tokens when no cached tokens in the store."""
+
+    hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA_NULL[idx]}
+
+    mock_client = await setup_evohome(hass, TEST_CONFIG)
+
+    # Confirm client was instantiated without tokens, as cache was empty...
+    assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs
+    assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs
+    assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwargs
+
+    # Confirm the expected tokens were cached to storage...
+    data: _TokenStoreT = hass_storage[DOMAIN]["data"]
+
+    assert data[SZ_USERNAME] == USERNAME_SAME
+    assert data[SZ_REFRESH_TOKEN] == f"new_{REFRESH_TOKEN}"
+    assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}"
+    assert (
+        dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True)
+        > dt_util.now()
+    )
+
+
+@pytest.mark.parametrize("idx", TEST_DATA)
+async def test_auth_tokens_same(
+    hass: HomeAssistant, hass_storage: dict[str, Any], idx: str
+) -> None:
+    """Test loading/saving authentication tokens when matching username."""
+
+    hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]}
+
+    mock_client = await setup_evohome(hass, TEST_CONFIG)
+
+    # Confirm client was instantiated with the cached tokens...
+    assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN
+    assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN
+    assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN_EXPIRES] == dt_aware_to_naive(
+        ACCESS_TOKEN_EXP_DTM
+    )
+
+    # Confirm the expected tokens were cached to storage...
+    data: _TokenStoreT = hass_storage[DOMAIN]["data"]
+
+    assert data[SZ_USERNAME] == USERNAME_SAME
+    assert data[SZ_REFRESH_TOKEN] == REFRESH_TOKEN
+    assert data[SZ_ACCESS_TOKEN] == ACCESS_TOKEN
+    assert dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES]) == ACCESS_TOKEN_EXP_DTM
+
+
+@pytest.mark.parametrize("idx", TEST_DATA)
+async def test_auth_tokens_past(
+    hass: HomeAssistant, hass_storage: dict[str, Any], idx: str
+) -> None:
+    """Test loading/saving authentication tokens with matching username, but expired."""
+
+    dt_dtm, dt_str = dt_pair(dt_util.now() - timedelta(hours=1))
+
+    # make this access token have expired in the past...
+    test_data = TEST_DATA[idx].copy()  # shallow copy is OK here
+    test_data[SZ_ACCESS_TOKEN_EXPIRES] = dt_str
+
+    hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": test_data}
+
+    mock_client = await setup_evohome(hass, TEST_CONFIG)
+
+    # Confirm client was instantiated with the cached tokens...
+    assert mock_client.call_args.kwargs[SZ_REFRESH_TOKEN] == REFRESH_TOKEN
+    assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN] == ACCESS_TOKEN
+    assert mock_client.call_args.kwargs[SZ_ACCESS_TOKEN_EXPIRES] == dt_aware_to_naive(
+        dt_dtm
+    )
+
+    # Confirm the expected tokens were cached to storage...
+    data: _TokenStoreT = hass_storage[DOMAIN]["data"]
+
+    assert data[SZ_USERNAME] == USERNAME_SAME
+    assert data[SZ_REFRESH_TOKEN] == REFRESH_TOKEN
+    assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}"
+    assert (
+        dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True)
+        > dt_util.now()
+    )
+
+
+@pytest.mark.parametrize("idx", TEST_DATA)
+async def test_auth_tokens_diff(
+    hass: HomeAssistant, hass_storage: dict[str, Any], idx: str
+) -> None:
+    """Test loading/saving authentication tokens when unmatched username."""
+
+    hass_storage[DOMAIN] = DOMAIN_STORAGE_BASE | {"data": TEST_DATA[idx]}
+
+    mock_client = await setup_evohome(
+        hass, TEST_CONFIG | {CONF_USERNAME: USERNAME_DIFF}
+    )
+
+    # Confirm client was instantiated without tokens, as username was different...
+    assert SZ_REFRESH_TOKEN not in mock_client.call_args.kwargs
+    assert SZ_ACCESS_TOKEN not in mock_client.call_args.kwargs
+    assert SZ_ACCESS_TOKEN_EXPIRES not in mock_client.call_args.kwargs
+
+    # Confirm the expected tokens were cached to storage...
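+    # (with a different username the cached tokens are not reused, so the
+    # mocked Broker.get mints new_* tokens, which should now be saved here)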
+ data: _TokenStoreT = hass_storage[DOMAIN]["data"] + + assert data[SZ_USERNAME] == USERNAME_DIFF + assert data[SZ_REFRESH_TOKEN] == f"new_{REFRESH_TOKEN}" + assert data[SZ_ACCESS_TOKEN] == f"new_{ACCESS_TOKEN}" + assert ( + dt_util.parse_datetime(data[SZ_ACCESS_TOKEN_EXPIRES], raise_on_error=True) + > dt_util.now() + ) diff --git a/tests/components/fan/test_init.py b/tests/components/fan/test_init.py index a72ad5e48f6..a7dc544a97a 100644 --- a/tests/components/fan/test_init.py +++ b/tests/components/fan/test_init.py @@ -38,7 +38,7 @@ from tests.common import ( class BaseFan(FanEntity): """Implementation of the abstract FanEntity.""" - def __init__(self): + def __init__(self) -> None: """Initialize the fan.""" diff --git a/tests/components/ffmpeg/test_init.py b/tests/components/ffmpeg/test_init.py index 353b8fdfcc0..aa407d5b695 100644 --- a/tests/components/ffmpeg/test_init.py +++ b/tests/components/ffmpeg/test_init.py @@ -16,13 +16,13 @@ from homeassistant.const import ( EVENT_HOMEASSISTANT_STOP, ) from homeassistant.core import HomeAssistant, callback -from homeassistant.setup import async_setup_component, setup_component +from homeassistant.setup import async_setup_component -from tests.common import assert_setup_component, get_test_home_assistant +from tests.common import assert_setup_component @callback -def async_start(hass, entity_id=None): +def async_start(hass: HomeAssistant, entity_id: str | None = None) -> None: """Start a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. @@ -32,7 +32,7 @@ def async_start(hass, entity_id=None): @callback -def async_stop(hass, entity_id=None): +def async_stop(hass: HomeAssistant, entity_id: str | None = None) -> None: """Stop a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. @@ -42,7 +42,7 @@ def async_stop(hass, entity_id=None): @callback -def async_restart(hass, entity_id=None): +def async_restart(hass: HomeAssistant, entity_id: str | None = None) -> None: """Restart a FFmpeg process on entity. This is a legacy helper method. Do not use it for new tests. 
@@ -54,7 +54,12 @@ def async_restart(hass, entity_id=None): class MockFFmpegDev(ffmpeg.FFmpegBase): """FFmpeg device mock.""" - def __init__(self, hass, initial_state=True, entity_id="test.ffmpeg_device"): + def __init__( + self, + hass: HomeAssistant, + initial_state: bool = True, + entity_id: str = "test.ffmpeg_device", + ) -> None: """Initialize mock.""" super().__init__(None, initial_state) @@ -77,26 +82,22 @@ class MockFFmpegDev(ffmpeg.FFmpegBase): self.called_entities = entity_ids -def test_setup_component() -> None: +async def test_setup_component(hass: HomeAssistant) -> None: """Set up ffmpeg component.""" - with get_test_home_assistant() as hass: - with assert_setup_component(1): - setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + with assert_setup_component(1): + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - assert hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg" - hass.stop() + assert hass.data[ffmpeg.DATA_FFMPEG].binary == "ffmpeg" -def test_setup_component_test_service() -> None: +async def test_setup_component_test_service(hass: HomeAssistant) -> None: """Set up ffmpeg component test services.""" - with get_test_home_assistant() as hass: - with assert_setup_component(1): - setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) + with assert_setup_component(1): + await async_setup_component(hass, ffmpeg.DOMAIN, {ffmpeg.DOMAIN: {}}) - assert hass.services.has_service(ffmpeg.DOMAIN, "start") - assert hass.services.has_service(ffmpeg.DOMAIN, "stop") - assert hass.services.has_service(ffmpeg.DOMAIN, "restart") - hass.stop() + assert hass.services.has_service(ffmpeg.DOMAIN, "start") + assert hass.services.has_service(ffmpeg.DOMAIN, "stop") + assert hass.services.has_service(ffmpeg.DOMAIN, "restart") async def test_setup_component_test_register(hass: HomeAssistant) -> None: diff --git a/tests/components/fibaro/test_config_flow.py b/tests/components/fibaro/test_config_flow.py index b6b4e3992cd..508bb81973d 100644 --- a/tests/components/fibaro/test_config_flow.py +++ b/tests/components/fibaro/test_config_flow.py @@ -183,15 +183,7 @@ async def test_reauth_success( hass: HomeAssistant, mock_config_entry: MockConfigEntry ) -> None: """Successful reauth flow initialized by the user.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -211,15 +203,7 @@ async def test_reauth_connect_failure( mock_fibaro_client: Mock, ) -> None: """Successful reauth flow initialized by the user.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -244,15 +228,7 @@ async def test_reauth_auth_failure( mock_fibaro_client: Mock, ) -> None: """Successful reauth flow initialized by the user.""" - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) - + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert 
result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/fido/test_sensor.py b/tests/components/fido/test_sensor.py index d47c7ce8e9f..654221cfacd 100644 --- a/tests/components/fido/test_sensor.py +++ b/tests/components/fido/test_sensor.py @@ -18,7 +18,7 @@ CONTRACT = "123456789" class FidoClientMock: """Fake Fido client.""" - def __init__(self, username, password, timeout=None, httpsession=None): + def __init__(self, username, password, timeout=None, httpsession=None) -> None: """Fake Fido client init.""" def get_phone_numbers(self): diff --git a/tests/components/file/test_config_flow.py b/tests/components/file/test_config_flow.py index 86ada1fec61..30d00411c44 100644 --- a/tests/components/file/test_config_flow.py +++ b/tests/components/file/test_config_flow.py @@ -7,6 +7,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.file import DOMAIN +from homeassistant.const import CONF_UNIT_OF_MEASUREMENT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -15,20 +16,22 @@ from tests.common import MockConfigEntry MOCK_CONFIG_NOTIFY = { "platform": "notify", "file_path": "some_file", - "timestamp": True, } +MOCK_OPTIONS_NOTIFY = {"timestamp": True} MOCK_CONFIG_SENSOR = { "platform": "sensor", "file_path": "some/path", - "value_template": "{{ value | round(1) }}", } - -pytestmark = pytest.mark.usefixtures("mock_setup_entry") +MOCK_OPTIONS_SENSOR = {"value_template": "{{ value | round(1) }}"} +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( - ("platform", "data"), - [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], + ("platform", "data", "options"), + [ + ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), + ], ) async def test_form( hass: HomeAssistant, @@ -36,6 +39,7 @@ async def test_form( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test we get the form.""" result = await hass.config_entries.flow.async_init( @@ -50,7 +54,7 @@ async def test_form( ) await hass.async_block_till_done() - user_input = dict(data) + user_input = {**data, **options} user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input @@ -59,12 +63,17 @@ async def test_form( assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"] == data + assert result2["options"] == options assert len(mock_setup_entry.mock_calls) == 1 +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize( - ("platform", "data"), - [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], + ("platform", "data", "options"), + [ + ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), + ], ) async def test_already_configured( hass: HomeAssistant, @@ -72,9 +81,10 @@ async def test_already_configured( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test aborting if the entry is already configured.""" - entry = MockConfigEntry(domain=DOMAIN, data=data) + entry = MockConfigEntry(domain=DOMAIN, data=data, options=options) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( @@ -91,7 +101,7 @@ async def test_already_configured( assert result["type"] is FlowResultType.FORM assert result["step_id"] == platform - user_input = dict(data) + 
user_input = {**data, **options} user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -103,10 +113,14 @@ async def test_already_configured( assert result2["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_setup_entry") @pytest.mark.parametrize("is_allowed", [False], ids=["not_allowed"]) @pytest.mark.parametrize( - ("platform", "data"), - [("sensor", MOCK_CONFIG_SENSOR), ("notify", MOCK_CONFIG_NOTIFY)], + ("platform", "data", "options"), + [ + ("sensor", MOCK_CONFIG_SENSOR, MOCK_OPTIONS_SENSOR), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY), + ], ) async def test_not_allowed( hass: HomeAssistant, @@ -114,6 +128,7 @@ async def test_not_allowed( mock_is_allowed_path: bool, platform: str, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test aborting if the file path is not allowed.""" result = await hass.config_entries.flow.async_init( @@ -130,7 +145,7 @@ async def test_not_allowed( assert result["type"] is FlowResultType.FORM assert result["step_id"] == platform - user_input = dict(data) + user_input = {**data, **options} user_input.pop("platform") result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -140,3 +155,49 @@ async def test_not_allowed( assert result2["type"] is FlowResultType.FORM assert result2["errors"] == {"file_path": "not_allowed"} + + +@pytest.mark.parametrize( + ("platform", "data", "options", "new_options"), + [ + ( + "sensor", + MOCK_CONFIG_SENSOR, + MOCK_OPTIONS_SENSOR, + {CONF_UNIT_OF_MEASUREMENT: "mm"}, + ), + ("notify", MOCK_CONFIG_NOTIFY, MOCK_OPTIONS_NOTIFY, {"timestamp": False}), + ], +) +async def test_options_flow( + hass: HomeAssistant, + mock_is_allowed_path: bool, + platform: str, + data: dict[str, Any], + options: dict[str, Any], + new_options: dict[str, Any], +) -> None: + """Test options config flow.""" + entry = MockConfigEntry(domain=DOMAIN, data=data, options=options, version=2) + entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + result = await hass.config_entries.options.async_init(entry.entry_id) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "init" + + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input=new_options, + ) + await hass.async_block_till_done() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["data"] == new_options + + entry = hass.config_entries.async_get_entry(entry.entry_id) + assert entry.state is config_entries.ConfigEntryState.LOADED + assert entry.options == new_options diff --git a/tests/components/file/test_init.py b/tests/components/file/test_init.py new file mode 100644 index 00000000000..faf1488ed07 --- /dev/null +++ b/tests/components/file/test_init.py @@ -0,0 +1,65 @@ +"""The tests for local file init.""" + +from unittest.mock import MagicMock, Mock, patch + +from homeassistant.components.file import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, get_fixture_path + + +@patch("os.path.isfile", Mock(return_value=True)) +@patch("os.access", Mock(return_value=True)) +async def test_migration_to_version_2( + hass: HomeAssistant, mock_is_allowed_path: MagicMock +) -> None: + """Test the File sensor with JSON entries.""" + data = { + "platform": "sensor", + "name": "file2", + "file_path": get_fixture_path("file_value_template.txt", 
"file"), + "value_template": "{{ value_json.temperature }}", + } + + entry = MockConfigEntry( + domain=DOMAIN, + version=1, + data=data, + title=f"test [{data['file_path']}]", + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.LOADED + assert entry.version == 2 + assert entry.data == { + "platform": "sensor", + "name": "file2", + "file_path": get_fixture_path("file_value_template.txt", "file"), + } + assert entry.options == { + "value_template": "{{ value_json.temperature }}", + } + + +@patch("os.path.isfile", Mock(return_value=True)) +@patch("os.access", Mock(return_value=True)) +async def test_migration_from_future_version( + hass: HomeAssistant, mock_is_allowed_path: MagicMock +) -> None: + """Test the File sensor with JSON entries.""" + data = { + "platform": "sensor", + "name": "file2", + "file_path": get_fixture_path("file_value_template.txt", "file"), + "value_template": "{{ value_json.temperature }}", + } + + entry = MockConfigEntry( + domain=DOMAIN, version=3, data=data, title=f"test [{data['file_path']}]" + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + + assert entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/file/test_notify.py b/tests/components/file/test_notify.py index faa9027aa21..33e4739a488 100644 --- a/tests/components/file/test_notify.py +++ b/tests/components/file/test_notify.py @@ -174,7 +174,7 @@ async def test_legacy_notify_file_exception( @pytest.mark.parametrize( - ("timestamp", "data"), + ("timestamp", "data", "options"), [ ( False, @@ -182,6 +182,8 @@ async def test_legacy_notify_file_exception( "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": False, }, ), @@ -191,6 +193,8 @@ async def test_legacy_notify_file_exception( "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": True, }, ), @@ -203,6 +207,7 @@ async def test_legacy_notify_file_entry_only_setup( timestamp: bool, mock_is_allowed_path: MagicMock, data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test the legacy notify file output in entry only setup.""" filename = "mock_file" @@ -213,7 +218,11 @@ async def test_legacy_notify_file_entry_only_setup( message = params["message"] entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -252,7 +261,7 @@ async def test_legacy_notify_file_entry_only_setup( @pytest.mark.parametrize( - ("is_allowed", "config"), + ("is_allowed", "config", "options"), [ ( False, @@ -260,6 +269,8 @@ async def test_legacy_notify_file_entry_only_setup( "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": False, }, ), @@ -271,10 +282,15 @@ async def test_legacy_notify_file_not_allowed( caplog: pytest.LogCaptureFixture, mock_is_allowed_path: MagicMock, config: dict[str, Any], + options: dict[str, Any], ) -> None: """Test legacy notify file output not allowed.""" entry = MockConfigEntry( - domain=DOMAIN, data=config, title=f"test [{config['file_path']}]" + domain=DOMAIN, + data=config, + version=2, + options=options, + title=f"test [{config['file_path']}]", ) entry.add_to_hass(hass) assert not await hass.config_entries.async_setup(entry.entry_id) @@ -293,13 +309,15 @@ async def test_legacy_notify_file_not_allowed( ], ) 
@pytest.mark.parametrize( - ("data", "is_allowed"), + ("data", "options", "is_allowed"), [ ( { "name": "test", "platform": "notify", "file_path": "mock_file", + }, + { "timestamp": False, }, True, @@ -314,12 +332,17 @@ async def test_notify_file_write_access_failed( service: str, params: dict[str, Any], data: dict[str, Any], + options: dict[str, Any], ) -> None: """Test the notify file fails.""" domain = notify.DOMAIN entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/file/test_sensor.py b/tests/components/file/test_sensor.py index 60a81df2b1e..634ae9d626c 100644 --- a/tests/components/file/test_sensor.py +++ b/tests/components/file/test_sensor.py @@ -47,7 +47,11 @@ async def test_file_value_entry_setup( } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options={}, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -66,11 +70,17 @@ async def test_file_value_template( "platform": "sensor", "name": "file2", "file_path": get_fixture_path("file_value_template.txt", "file"), + } + options = { "value_template": "{{ value_json.temperature }}", } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options=options, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -90,7 +100,11 @@ async def test_file_empty(hass: HomeAssistant, mock_is_allowed_path: MagicMock) } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options={}, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) @@ -113,7 +127,11 @@ async def test_file_path_invalid( } entry = MockConfigEntry( - domain=DOMAIN, data=data, title=f"test [{data['file_path']}]" + domain=DOMAIN, + data=data, + version=2, + options={}, + title=f"test [{data['file_path']}]", ) entry.add_to_hass(hass) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/file_upload/test_init.py b/tests/components/file_upload/test_init.py index 149bbb7ee2f..22ad9323f05 100644 --- a/tests/components/file_upload/test_init.py +++ b/tests/components/file_upload/test_init.py @@ -3,6 +3,7 @@ from contextlib import contextmanager from pathlib import Path from random import getrandbits +from typing import Any from unittest.mock import patch import pytest @@ -141,7 +142,7 @@ async def test_upload_large_file_fails( yield MockPathOpen() class MockPathOpen: - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: pass def write(self, data: bytes) -> None: diff --git a/tests/components/fireservicerota/test_config_flow.py b/tests/components/fireservicerota/test_config_flow.py index 539906d800b..5555a8d649c 100644 --- a/tests/components/fireservicerota/test_config_flow.py +++ b/tests/components/fireservicerota/test_config_flow.py @@ -120,23 +120,8 @@ async def test_reauth(hass: HomeAssistant) -> None: domain=DOMAIN, data=MOCK_CONF, unique_id=MOCK_CONF[CONF_USERNAME] ) entry.add_to_hass(hass) - with patch( - 
"homeassistant.components.fireservicerota.config_flow.FireServiceRota" - ) as mock_fsr: - mock_fireservicerota = mock_fsr.return_value - mock_fireservicerota.request_tokens.return_value = MOCK_TOKEN_INFO - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - }, - data=MOCK_CONF, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.FORM + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM with ( patch( diff --git a/tests/components/fitbit/test_config_flow.py b/tests/components/fitbit/test_config_flow.py index d5f3d09abdd..6f717459486 100644 --- a/tests/components/fitbit/test_config_flow.py +++ b/tests/components/fitbit/test_config_flow.py @@ -472,13 +472,7 @@ async def test_reauth_flow( assert len(entries) == 1 # config_entry.req initiates reauth - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -546,13 +540,7 @@ async def test_reauth_wrong_user_id( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/flic/test_binary_sensor.py b/tests/components/flic/test_binary_sensor.py index 44db1d6ea1b..cdc1d64db41 100644 --- a/tests/components/flic/test_binary_sensor.py +++ b/tests/components/flic/test_binary_sensor.py @@ -8,7 +8,7 @@ from homeassistant.setup import async_setup_component class _MockFlicClient: - def __init__(self, button_addresses): + def __init__(self, button_addresses) -> None: self.addresses = button_addresses self.get_info_callback = None self.scan_wizard = None diff --git a/tests/components/flick_electric/test_config_flow.py b/tests/components/flick_electric/test_config_flow.py index 1b3ed1de34d..85a6495d3c5 100644 --- a/tests/components/flick_electric/test_config_flow.py +++ b/tests/components/flick_electric/test_config_flow.py @@ -6,6 +6,7 @@ from pyflick.authentication import AuthException from homeassistant import config_entries from homeassistant.components.flick_electric.const import DOMAIN +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -15,7 +16,7 @@ from tests.common import MockConfigEntry CONF = {CONF_USERNAME: "test-username", CONF_PASSWORD: "test-password"} -async def _flow_submit(hass): +async def _flow_submit(hass: HomeAssistant) -> ConfigFlowResult: return await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER}, diff --git a/tests/components/flume/test_config_flow.py b/tests/components/flume/test_config_flow.py index 915299223e9..87fe3a2bbf0 100644 --- a/tests/components/flume/test_config_flow.py +++ b/tests/components/flume/test_config_flow.py @@ -124,11 +124,7 @@ async def test_reauth(hass: HomeAssistant, requests_mock: Mocker) -> None: ) entry.add_to_hass(hass) - 
result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": "test@test.org"}, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/flux/test_switch.py b/tests/components/flux/test_switch.py index f957083dd11..ab0e8a556c4 100644 --- a/tests/components/flux/test_switch.py +++ b/tests/components/flux/test_switch.py @@ -1,5 +1,6 @@ """The tests for the Flux switch platform.""" +from datetime import date, datetime from unittest.mock import patch from freezegun import freeze_time @@ -187,7 +188,9 @@ async def test_flux_when_switch_is_off( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -242,7 +245,9 @@ async def test_flux_before_sunrise( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=5) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -364,7 +369,9 @@ async def test_flux_after_sunrise_before_sunset( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -426,7 +433,9 @@ async def test_flux_after_sunset_before_stop( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -489,7 +498,9 @@ async def test_flux_after_stop_before_sunrise( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -551,7 +562,9 @@ async def test_flux_with_custom_start_stop_times( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -618,7 +631,9 @@ async def test_flux_before_sunrise_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -684,7 +699,9 
@@ async def test_flux_after_sunrise_before_sunset_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -750,7 +767,9 @@ async def test_flux_after_sunset_before_midnight_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -816,7 +835,9 @@ async def test_flux_after_sunset_after_midnight_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -882,7 +903,9 @@ async def test_flux_after_stop_before_sunrise_stop_next_day( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -945,7 +968,9 @@ async def test_flux_with_custom_colortemps( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1010,7 +1035,9 @@ async def test_flux_with_custom_brightness( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1091,7 +1118,9 @@ async def test_flux_with_multiple_lights( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1158,7 +1187,9 @@ async def test_flux_with_mired( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( + hass: HomeAssistant, event: str, now: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time @@ -1219,7 +1250,9 @@ async def test_flux_with_rgb( sunset_time = test_time.replace(hour=17, minute=0, second=0) sunrise_time = test_time.replace(hour=5, minute=0, second=0) - def event_date(hass, event, now=None): + def event_date( 
+ hass: HomeAssistant, event: str, date: date | datetime | None = None + ) -> datetime | None: if event == SUN_EVENT_SUNRISE: return sunrise_time return sunset_time diff --git a/tests/components/folder_watcher/test_init.py b/tests/components/folder_watcher/test_init.py index 8309988931a..965ae33c4f8 100644 --- a/tests/components/folder_watcher/test_init.py +++ b/tests/components/folder_watcher/test_init.py @@ -36,7 +36,7 @@ def test_event() -> None: class MockPatternMatchingEventHandler: """Mock base class for the pattern matcher event handler.""" - def __init__(self, patterns): + def __init__(self, patterns) -> None: pass with patch( @@ -66,7 +66,7 @@ def test_move_event() -> None: class MockPatternMatchingEventHandler: """Mock base class for the pattern matcher event handler.""" - def __init__(self, patterns): + def __init__(self, patterns) -> None: pass with patch( diff --git a/tests/components/fritz/conftest.py b/tests/components/fritz/conftest.py index bb049f067b4..fa92fa37c04 100644 --- a/tests/components/fritz/conftest.py +++ b/tests/components/fritz/conftest.py @@ -30,7 +30,7 @@ class FritzServiceMock(Service): class FritzConnectionMock: """FritzConnection mocking.""" - def __init__(self, services): + def __init__(self, services) -> None: """Init Mocking class.""" self.modelname = MOCK_MODELNAME self.call_action = self._call_action diff --git a/tests/components/fritz/test_config_flow.py b/tests/components/fritz/test_config_flow.py index a54acbb0ac0..deefe7e4e77 100644 --- a/tests/components/fritz/test_config_flow.py +++ b/tests/components/fritz/test_config_flow.py @@ -23,12 +23,7 @@ from homeassistant.components.fritz.const import ( FRITZ_AUTH_EXCEPTIONS, ) from homeassistant.components.ssdp import ATTR_UPNP_UDN -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, @@ -310,6 +305,9 @@ async def test_reauth_successful( mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -335,15 +333,6 @@ async def test_reauth_successful( mock_request_post.return_value.status_code = 200 mock_request_post.return_value.text = MOCK_REQUEST - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -376,20 +365,14 @@ async def test_reauth_not_successful( mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.fritz.config_flow.FritzConnection", side_effect=side_effect, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( 
result["flow_id"], user_input={ diff --git a/tests/components/fritzbox/__init__.py b/tests/components/fritzbox/__init__.py index 61312805e91..bd68615212d 100644 --- a/tests/components/fritzbox/__init__.py +++ b/tests/components/fritzbox/__init__.py @@ -115,6 +115,13 @@ class FritzDeviceClimateMock(FritzEntityBaseMock): scheduled_preset = PRESET_ECO +class FritzDeviceClimateWithoutTempSensorMock(FritzDeviceClimateMock): + """Mock of a AVM Fritz!Box climate device without exposing temperature sensor.""" + + temperature = None + has_temperature_sensor = False + + class FritzDeviceSensorMock(FritzEntityBaseMock): """Mock of a AVM Fritz!Box sensor device.""" @@ -173,6 +180,7 @@ class FritzDeviceLightMock(FritzEntityBaseMock): level = 100 present = True state = True + color_temp = None class FritzDeviceCoverMock(FritzEntityBaseMock): @@ -187,3 +195,9 @@ class FritzDeviceCoverMock(FritzEntityBaseMock): has_thermostat = False has_blind = True levelpercentage = 0 + + +class FritzDeviceCoverUnknownPositionMock(FritzDeviceCoverMock): + """Mock of a AVM Fritz!Box cover device with unknown position.""" + + levelpercentage = None diff --git a/tests/components/fritzbox/test_climate.py b/tests/components/fritzbox/test_climate.py index 853c09c534b..358eeaa714e 100644 --- a/tests/components/fritzbox/test_climate.py +++ b/tests/components/fritzbox/test_climate.py @@ -46,7 +46,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError import homeassistant.util.dt as dt_util -from . import FritzDeviceClimateMock, set_devices, setup_config_entry +from . import ( + FritzDeviceClimateMock, + FritzDeviceClimateWithoutTempSensorMock, + set_devices, + setup_config_entry, +) from .const import CONF_FAKE_NAME, MOCK_CONFIG from tests.common import async_fire_time_changed @@ -162,6 +167,18 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None: assert state.state == PRESET_COMFORT +async def test_hkr_wo_temperature_sensor(hass: HomeAssistant, fritz: Mock) -> None: + """Test hkr without exposing dedicated temperature sensor data block.""" + device = FritzDeviceClimateWithoutTempSensorMock() + assert await setup_config_entry( + hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz + ) + + state = hass.states.get(ENTITY_ID) + assert state + assert state.attributes[ATTR_CURRENT_TEMPERATURE] == 18.0 + + async def test_target_temperature_on(hass: HomeAssistant, fritz: Mock) -> None: """Test turn device on.""" device = FritzDeviceClimateMock() diff --git a/tests/components/fritzbox/test_config_flow.py b/tests/components/fritzbox/test_config_flow.py index 72d36a8ab63..fd53bd2e637 100644 --- a/tests/components/fritzbox/test_config_flow.py +++ b/tests/components/fritzbox/test_config_flow.py @@ -12,12 +12,7 @@ from requests.exceptions import HTTPError from homeassistant.components import ssdp from homeassistant.components.fritzbox.const import DOMAIN from homeassistant.components.ssdp import ATTR_UPNP_FRIENDLY_NAME, ATTR_UPNP_UDN -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_RECONFIGURE, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_DEVICES, CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -129,12 +124,7 @@ async def test_reauth_success(hass: HomeAssistant, fritz: Mock) -> None: """Test starting a reauthentication flow.""" 
mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -158,12 +148,7 @@ async def test_reauth_auth_failed(hass: HomeAssistant, fritz: Mock) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -186,12 +171,7 @@ async def test_reauth_not_successful(hass: HomeAssistant, fritz: Mock) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/fritzbox/test_cover.py b/tests/components/fritzbox/test_cover.py index 6c301fc8f46..6626db2bccf 100644 --- a/tests/components/fritzbox/test_cover.py +++ b/tests/components/fritzbox/test_cover.py @@ -3,7 +3,12 @@ from datetime import timedelta from unittest.mock import Mock, call -from homeassistant.components.cover import ATTR_CURRENT_POSITION, ATTR_POSITION, DOMAIN +from homeassistant.components.cover import ( + ATTR_CURRENT_POSITION, + ATTR_POSITION, + DOMAIN, + STATE_OPEN, +) from homeassistant.components.fritzbox.const import DOMAIN as FB_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -12,11 +17,17 @@ from homeassistant.const import ( SERVICE_OPEN_COVER, SERVICE_SET_COVER_POSITION, SERVICE_STOP_COVER, + STATE_UNKNOWN, ) from homeassistant.core import HomeAssistant import homeassistant.util.dt as dt_util -from . import FritzDeviceCoverMock, set_devices, setup_config_entry +from . 
import (
+    FritzDeviceCoverMock,
+    FritzDeviceCoverUnknownPositionMock,
+    set_devices,
+    setup_config_entry,
+)
 from .const import CONF_FAKE_NAME, MOCK_CONFIG
 
 from tests.common import async_fire_time_changed
@@ -33,9 +44,22 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None:
 
     state = hass.states.get(ENTITY_ID)
     assert state
+    assert state.state == STATE_OPEN
     assert state.attributes[ATTR_CURRENT_POSITION] == 100
 
 
+async def test_unknown_position(hass: HomeAssistant, fritz: Mock) -> None:
+    """Test cover with unknown position."""
+    device = FritzDeviceCoverUnknownPositionMock()
+    assert await setup_config_entry(
+        hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz
+    )
+
+    state = hass.states.get(ENTITY_ID)
+    assert state
+    assert state.state == STATE_UNKNOWN
+
+
 async def test_open_cover(hass: HomeAssistant, fritz: Mock) -> None:
     """Test opening the cover."""
     device = FritzDeviceCoverMock()
diff --git a/tests/components/fritzbox/test_init.py b/tests/components/fritzbox/test_init.py
index c84498b1560..56e3e7a5738 100644
--- a/tests/components/fritzbox/test_init.py
+++ b/tests/components/fritzbox/test_init.py
@@ -18,6 +18,7 @@ from homeassistant.const import (
     CONF_HOST,
     CONF_PASSWORD,
     CONF_USERNAME,
+    EVENT_HOMEASSISTANT_STOP,
     STATE_UNAVAILABLE,
     UnitOfTemperature,
 )
@@ -199,6 +200,35 @@ async def test_unload_remove(hass: HomeAssistant, fritz: Mock) -> None:
     assert state is None
 
 
+async def test_logout_on_stop(hass: HomeAssistant, fritz: Mock) -> None:
+    """Test we log out from fritzbox when Home Assistant stops."""
+    fritz().get_devices.return_value = [FritzDeviceSwitchMock()]
+    entity_id = f"{SWITCH_DOMAIN}.{CONF_FAKE_NAME}"
+
+    entry = MockConfigEntry(
+        domain=FB_DOMAIN,
+        data=MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0],
+        unique_id=entity_id,
+    )
+    entry.add_to_hass(hass)
+
+    config_entries = hass.config_entries.async_entries(FB_DOMAIN)
+    assert len(config_entries) == 1
+    assert entry is config_entries[0]
+
+    assert await hass.config_entries.async_setup(entry.entry_id)
+    await hass.async_block_till_done()
+
+    assert entry.state is ConfigEntryState.LOADED
+    state = hass.states.get(entity_id)
+    assert state
+
+    hass.bus.async_fire(EVENT_HOMEASSISTANT_STOP)
+    await hass.async_block_till_done()
+
+    assert fritz().logout.call_count == 1
+
+
 async def test_remove_device(
     hass: HomeAssistant,
     device_registry: dr.DeviceRegistry,
diff --git a/tests/components/fritzbox/test_light.py b/tests/components/fritzbox/test_light.py
index 45920c7c3ee..3cafa933fa3 100644
--- a/tests/components/fritzbox/test_light.py
+++ b/tests/components/fritzbox/test_light.py
@@ -3,6 +3,7 @@ from datetime import timedelta
 from unittest.mock import Mock, call
 
+import pytest
 from requests.exceptions import HTTPError
 
 from homeassistant.components.fritzbox.const import (
@@ -12,12 +13,14 @@ from homeassistant.components.light import (
     ATTR_BRIGHTNESS,
+    ATTR_COLOR_MODE,
     ATTR_COLOR_TEMP_KELVIN,
     ATTR_HS_COLOR,
     ATTR_MAX_COLOR_TEMP_KELVIN,
     ATTR_MIN_COLOR_TEMP_KELVIN,
     ATTR_SUPPORTED_COLOR_MODES,
     DOMAIN,
+    ColorMode,
 )
 from homeassistant.const import (
     ATTR_ENTITY_ID,
@@ -56,9 +59,11 @@ async def test_setup(hass: HomeAssistant, fritz: Mock) -> None:
     assert state
     assert state.state == STATE_ON
     assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name"
+    assert state.attributes[ATTR_COLOR_MODE] == ColorMode.COLOR_TEMP
    assert state.attributes[ATTR_COLOR_TEMP_KELVIN] == 2700
     assert state.attributes[ATTR_MIN_COLOR_TEMP_KELVIN] == 2700
     assert 
state.attributes[ATTR_MAX_COLOR_TEMP_KELVIN] == 6500 + assert state.attributes[ATTR_HS_COLOR] == (28.395, 65.723) assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -99,6 +104,9 @@ async def test_setup_non_color_non_level(hass: HomeAssistant, fritz: Mock) -> No assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" assert ATTR_BRIGHTNESS not in state.attributes assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["onoff"] + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.ONOFF + assert state.attributes.get(ATTR_COLOR_TEMP_KELVIN) is None + assert state.attributes.get(ATTR_HS_COLOR) is None async def test_setup_color(hass: HomeAssistant, fritz: Mock) -> None: @@ -120,6 +128,8 @@ async def test_setup_color(hass: HomeAssistant, fritz: Mock) -> None: assert state assert state.state == STATE_ON assert state.attributes[ATTR_FRIENDLY_NAME] == "fake_name" + assert state.attributes[ATTR_COLOR_MODE] == ColorMode.HS + assert state.attributes[ATTR_COLOR_TEMP_KELVIN] is None assert state.attributes[ATTR_BRIGHTNESS] == 100 assert state.attributes[ATTR_HS_COLOR] == (100, 70) assert state.attributes[ATTR_SUPPORTED_COLOR_MODES] == ["color_temp", "hs"] @@ -183,16 +193,16 @@ async def test_turn_on_color_unsupported_api_method( device.get_colors.return_value = { "Red": [("100", "70", "10"), ("100", "50", "10"), ("100", "30", "10")] } - mockresponse = Mock() - mockresponse.status_code = 400 - - error = HTTPError("Bad Request") - error.response = mockresponse - device.set_unmapped_color.side_effect = error - assert await setup_config_entry( hass, MOCK_CONFIG[FB_DOMAIN][CONF_DEVICES][0], ENTITY_ID, device, fritz ) + + # test fallback to `setcolor` + error = HTTPError("Bad Request") + error.response = Mock() + error.response.status_code = 400 + device.set_unmapped_color.side_effect = error + await hass.services.async_call( DOMAIN, SERVICE_TURN_ON, @@ -205,6 +215,16 @@ async def test_turn_on_color_unsupported_api_method( assert device.set_level.call_args_list == [call(100)] assert device.set_color.call_args_list == [call((100, 70))] + # test for unknown error + error.response.status_code = 500 + with pytest.raises(HTTPError, match="Bad Request"): + await hass.services.async_call( + DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: ENTITY_ID, ATTR_BRIGHTNESS: 100, ATTR_HS_COLOR: (100, 70)}, + True, + ) + async def test_turn_off(hass: HomeAssistant, fritz: Mock) -> None: """Test turn device off.""" diff --git a/tests/components/fronius/__init__.py b/tests/components/fronius/__init__.py index 2109d4a6692..57b22490ed0 100644 --- a/tests/components/fronius/__init__.py +++ b/tests/components/fronius/__init__.py @@ -3,9 +3,12 @@ from __future__ import annotations from collections.abc import Callable +from datetime import timedelta import json from typing import Any +from freezegun.api import FrozenDateTimeFactory + from homeassistant.components.fronius.const import DOMAIN from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_HOST @@ -114,7 +117,12 @@ def mock_responses( ) -async def enable_all_entities(hass, freezer, config_entry_id, time_till_next_update): +async def enable_all_entities( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + config_entry_id: str, + time_till_next_update: timedelta, +) -> None: """Enable all entities for a config entry and fast forward time to receive data.""" registry = er.async_get(hass) entities = er.async_entries_for_config_entry(registry, config_entry_id) diff --git 
a/tests/components/frontier_silicon/test_config_flow.py b/tests/components/frontier_silicon/test_config_flow.py index 04bd1febdf8..a6c1ba1e74f 100644 --- a/tests/components/frontier_silicon/test_config_flow.py +++ b/tests/components/frontier_silicon/test_config_flow.py @@ -356,15 +356,7 @@ async def test_reauth_flow(hass: HomeAssistant, config_entry: MockConfigEntry) - config_entry.add_to_hass(hass) assert config_entry.data[CONF_PIN] == "1234" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "device_config" @@ -395,15 +387,7 @@ async def test_reauth_flow_friendly_name_error( config_entry.add_to_hass(hass) assert config_entry.data[CONF_PIN] == "1234" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": config_entry.unique_id, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "device_config" diff --git a/tests/components/fujitsu_fglair/__init__.py b/tests/components/fujitsu_fglair/__init__.py new file mode 100644 index 00000000000..2ec3fa0fce6 --- /dev/null +++ b/tests/components/fujitsu_fglair/__init__.py @@ -0,0 +1,21 @@ +"""Tests for the Fujitsu HVAC (based on Ayla IOT) integration.""" + +from ayla_iot_unofficial.fujitsu_hvac import FujitsuHVAC + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + +def entity_id(device: FujitsuHVAC) -> str: + """Generate the entity id for the given serial.""" + return f"{Platform.CLIMATE}.{device.device_serial_number}" diff --git a/tests/components/fujitsu_fglair/conftest.py b/tests/components/fujitsu_fglair/conftest.py new file mode 100644 index 00000000000..b73007a566b --- /dev/null +++ b/tests/components/fujitsu_fglair/conftest.py @@ -0,0 +1,113 @@ +"""Common fixtures for the Fujitsu HVAC (based on Ayla IOT) tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, create_autospec, patch + +from ayla_iot_unofficial import AylaApi +from ayla_iot_unofficial.fujitsu_hvac import FanSpeed, FujitsuHVAC, OpMode, SwingMode +import pytest + +from homeassistant.components.fujitsu_fglair.const import CONF_EUROPE, DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME + +from tests.common import MockConfigEntry + +TEST_DEVICE_NAME = "Test device" +TEST_DEVICE_SERIAL = "testserial" +TEST_USERNAME = "test-username" +TEST_PASSWORD = "test-password" + +TEST_USERNAME2 = "test-username2" +TEST_PASSWORD2 = "test-password2" + +TEST_SERIAL_NUMBER = "testserial123" +TEST_SERIAL_NUMBER2 = "testserial345" + +TEST_PROPERTY_VALUES = { + "model_name": "mock_fujitsu_device", + "mcu_firmware_version": "1", +} + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + 
"homeassistant.components.fujitsu_fglair.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_ayla_api(mock_devices: list[AsyncMock]) -> Generator[AsyncMock]: + """Override AylaApi creation.""" + my_mock = create_autospec(AylaApi) + + with ( + patch( + "homeassistant.components.fujitsu_fglair.new_ayla_api", return_value=my_mock + ), + patch( + "homeassistant.components.fujitsu_fglair.config_flow.new_ayla_api", + return_value=my_mock, + ), + ): + my_mock.async_get_devices.return_value = mock_devices + yield my_mock + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return a regular config entry.""" + return MockConfigEntry( + domain=DOMAIN, + unique_id=TEST_USERNAME, + data={ + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + }, + ) + + +def _create_device(serial_number: str) -> AsyncMock: + dev = AsyncMock(spec=FujitsuHVAC) + dev.device_serial_number = serial_number + dev.device_name = serial_number + dev.property_values = TEST_PROPERTY_VALUES + dev.has_capability.return_value = True + dev.fan_speed = FanSpeed.AUTO + dev.supported_fan_speeds = [ + FanSpeed.LOW, + FanSpeed.MEDIUM, + FanSpeed.HIGH, + FanSpeed.AUTO, + ] + dev.op_mode = OpMode.COOL + dev.supported_op_modes = [ + OpMode.OFF, + OpMode.ON, + OpMode.AUTO, + OpMode.COOL, + OpMode.DRY, + ] + dev.swing_mode = SwingMode.SWING_BOTH + dev.supported_swing_modes = [ + SwingMode.OFF, + SwingMode.SWING_HORIZONTAL, + SwingMode.SWING_VERTICAL, + SwingMode.SWING_BOTH, + ] + dev.temperature_range = [18.0, 26.0] + dev.sensed_temp = 22.0 + dev.set_temp = 21.0 + + return dev + + +@pytest.fixture +def mock_devices() -> list[AsyncMock]: + """Generate a list of mock devices that the API can return.""" + return [ + _create_device(serial) for serial in (TEST_SERIAL_NUMBER, TEST_SERIAL_NUMBER2) + ] diff --git a/tests/components/fujitsu_fglair/snapshots/test_climate.ambr b/tests/components/fujitsu_fglair/snapshots/test_climate.ambr new file mode 100644 index 00000000000..31b143c6f95 --- /dev/null +++ b/tests/components/fujitsu_fglair/snapshots/test_climate.ambr @@ -0,0 +1,189 @@ +# serializer version: 1 +# name: test_entities[climate.testserial123-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 26.0, + 'min_temp': 18.0, + 'swing_modes': list([ + 'off', + 'horizontal', + 'vertical', + 'both', + ]), + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.testserial123', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'fujitsu_fglair', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'testserial123', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[climate.testserial123-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.0, + 'fan_mode': 'auto', + 'fan_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'friendly_name': 'testserial123', + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 26.0, + 'min_temp': 18.0, + 
'supported_features': , + 'swing_mode': 'both', + 'swing_modes': list([ + 'off', + 'horizontal', + 'vertical', + 'both', + ]), + 'target_temp_step': 0.5, + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.testserial123', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- +# name: test_entities[climate.testserial345-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'fan_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 26.0, + 'min_temp': 18.0, + 'swing_modes': list([ + 'off', + 'horizontal', + 'vertical', + 'both', + ]), + 'target_temp_step': 0.5, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.testserial345', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'fujitsu_fglair', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'testserial345', + 'unit_of_measurement': None, + }) +# --- +# name: test_entities[climate.testserial345-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': 22.0, + 'fan_mode': 'auto', + 'fan_modes': list([ + 'low', + 'medium', + 'high', + 'auto', + ]), + 'friendly_name': 'testserial345', + 'hvac_modes': list([ + , + , + , + , + ]), + 'max_temp': 26.0, + 'min_temp': 18.0, + 'supported_features': , + 'swing_mode': 'both', + 'swing_modes': list([ + 'off', + 'horizontal', + 'vertical', + 'both', + ]), + 'target_temp_step': 0.5, + 'temperature': 21.0, + }), + 'context': , + 'entity_id': 'climate.testserial345', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'cool', + }) +# --- diff --git a/tests/components/fujitsu_fglair/test_climate.py b/tests/components/fujitsu_fglair/test_climate.py new file mode 100644 index 00000000000..daddc83a871 --- /dev/null +++ b/tests/components/fujitsu_fglair/test_climate.py @@ -0,0 +1,98 @@ +"""Test for the climate entities of Fujitsu HVAC.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.climate import ( + ATTR_FAN_MODE, + ATTR_HVAC_MODE, + ATTR_SWING_MODE, + ATTR_TEMPERATURE, + DOMAIN as CLIMATE_DOMAIN, + FAN_AUTO, + SERVICE_SET_FAN_MODE, + SERVICE_SET_HVAC_MODE, + SERVICE_SET_SWING_MODE, + SERVICE_SET_TEMPERATURE, + SWING_BOTH, + HVACMode, +) +from homeassistant.components.fujitsu_fglair.climate import ( + HA_TO_FUJI_FAN, + HA_TO_FUJI_HVAC, + HA_TO_FUJI_SWING, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import entity_id, setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_entities( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that coordinator returns the data we expect after the first refresh.""" + await setup_integration(hass, mock_config_entry) + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_set_attributes( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_ayla_api: AsyncMock, + mock_devices: list[AsyncMock], + mock_config_entry: MockConfigEntry, +) -> None: + """Test that setting the attributes calls the correct functions on the device.""" + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_HVAC_MODE, + service_data={ATTR_HVAC_MODE: HVACMode.COOL}, + target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, + blocking=True, + ) + mock_devices[0].async_set_op_mode.assert_called_once_with( + HA_TO_FUJI_HVAC[HVACMode.COOL] + ) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_FAN_MODE, + service_data={ATTR_FAN_MODE: FAN_AUTO}, + target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, + blocking=True, + ) + mock_devices[0].async_set_fan_speed.assert_called_once_with( + HA_TO_FUJI_FAN[FAN_AUTO] + ) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_SWING_MODE, + service_data={ATTR_SWING_MODE: SWING_BOTH}, + target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, + blocking=True, + ) + mock_devices[0].async_set_swing_mode.assert_called_once_with( + HA_TO_FUJI_SWING[SWING_BOTH] + ) + + await hass.services.async_call( + CLIMATE_DOMAIN, + SERVICE_SET_TEMPERATURE, + service_data={ATTR_TEMPERATURE: 23.0}, + target={ATTR_ENTITY_ID: entity_id(mock_devices[0])}, + blocking=True, + ) + mock_devices[0].async_set_set_temp.assert_called_once_with(23.0) diff --git a/tests/components/fujitsu_fglair/test_config_flow.py b/tests/components/fujitsu_fglair/test_config_flow.py new file mode 100644 index 00000000000..2828cf95339 --- /dev/null +++ b/tests/components/fujitsu_fglair/test_config_flow.py @@ -0,0 +1,182 @@ +"""Test the Fujitsu HVAC (based on Ayla IOT) config flow.""" + +from unittest.mock import AsyncMock + +from ayla_iot_unofficial import AylaAuthError +import pytest + +from homeassistant.components.fujitsu_fglair.const import CONF_EUROPE, DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResult, FlowResultType + +from .conftest import TEST_PASSWORD, TEST_PASSWORD2, TEST_USERNAME + +from tests.common import MockConfigEntry + + +async def _initial_step(hass: HomeAssistant) -> FlowResult: + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + return await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + }, + ) + + +async def test_full_flow( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_ayla_api: AsyncMock +) -> None: + """Test full config flow.""" + result = await _initial_step(hass) + 
mock_ayla_api.async_sign_in.assert_called_once() + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"FGLair ({TEST_USERNAME})" + assert result["data"] == { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + } + + +async def test_duplicate_entry( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that re-adding the same account fails.""" + mock_config_entry.add_to_hass(hass) + result = await _initial_step(hass) + mock_ayla_api.async_sign_in.assert_not_called() + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.parametrize( + ("exception", "err_msg"), + [ + (AylaAuthError, "invalid_auth"), + (TimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_form_exceptions( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ayla_api: AsyncMock, + exception: Exception, + err_msg: str, +) -> None: + """Test we handle exceptions.""" + + mock_ayla_api.async_sign_in.side_effect = exception + result = await _initial_step(hass) + mock_ayla_api.async_sign_in.assert_called_once() + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": err_msg} + + mock_ayla_api.async_sign_in.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == f"FGLair ({TEST_USERNAME})" + assert result["data"] == { + CONF_USERNAME: TEST_USERNAME, + CONF_PASSWORD: TEST_PASSWORD, + CONF_EUROPE: False, + } + + +async def test_reauth_success( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PASSWORD: TEST_PASSWORD2, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + assert mock_config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2 + + +@pytest.mark.parametrize( + ("exception", "err_msg"), + [ + (AylaAuthError, "invalid_auth"), + (TimeoutError, "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_reauth_exceptions( + hass: HomeAssistant, + exception: Exception, + err_msg: str, + mock_setup_entry: AsyncMock, + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test reauth flow when an exception occurs.""" + mock_config_entry.add_to_hass(hass) + + result = await mock_config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + + mock_ayla_api.async_sign_in.side_effect = exception + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_PASSWORD: TEST_PASSWORD2, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + assert result["errors"] == {"base": err_msg} + + mock_ayla_api.async_sign_in.side_effect = None + + result = await hass.config_entries.flow.async_configure( + 
result["flow_id"],
+        {
+            CONF_PASSWORD: TEST_PASSWORD2,
+        },
+    )
+
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "reauth_successful"
+    assert mock_config_entry.data[CONF_PASSWORD] == TEST_PASSWORD2
diff --git a/tests/components/fujitsu_fglair/test_init.py b/tests/components/fujitsu_fglair/test_init.py
new file mode 100644
index 00000000000..fa67ea08661
--- /dev/null
+++ b/tests/components/fujitsu_fglair/test_init.py
@@ -0,0 +1,128 @@
+"""Test the initialization of fujitsu_fglair entities."""
+
+from unittest.mock import AsyncMock
+
+from ayla_iot_unofficial import AylaAuthError
+from freezegun.api import FrozenDateTimeFactory
+import pytest
+
+from homeassistant.components.fujitsu_fglair.const import API_REFRESH, DOMAIN
+from homeassistant.const import STATE_UNAVAILABLE, Platform
+from homeassistant.core import HomeAssistant
+from homeassistant.helpers import entity_registry as er
+
+from . import entity_id, setup_integration
+
+from tests.common import MockConfigEntry, async_fire_time_changed
+
+
+async def test_auth_failure(
+    hass: HomeAssistant,
+    freezer: FrozenDateTimeFactory,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    mock_devices: list[AsyncMock],
+) -> None:
+    """Test entities become unavailable after auth failure."""
+    await setup_integration(hass, mock_config_entry)
+
+    mock_ayla_api.async_get_devices.side_effect = AylaAuthError
+    freezer.tick(API_REFRESH)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id(mock_devices[0])).state == STATE_UNAVAILABLE
+    assert hass.states.get(entity_id(mock_devices[1])).state == STATE_UNAVAILABLE
+
+
+async def test_device_auth_failure(
+    hass: HomeAssistant,
+    freezer: FrozenDateTimeFactory,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    mock_devices: list[AsyncMock],
+) -> None:
+    """Test entities become unavailable after an auth failure while updating devices."""
+    await setup_integration(hass, mock_config_entry)
+
+    for d in mock_ayla_api.async_get_devices.return_value:
+        d.async_update.side_effect = AylaAuthError
+
+    freezer.tick(API_REFRESH)
+    async_fire_time_changed(hass)
+    await hass.async_block_till_done()
+
+    assert hass.states.get(entity_id(mock_devices[0])).state == STATE_UNAVAILABLE
+    assert hass.states.get(entity_id(mock_devices[1])).state == STATE_UNAVAILABLE
+
+
+async def test_token_expired(
+    hass: HomeAssistant,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Make sure sign_in is called if the token expired."""
+    mock_ayla_api.token_expired = True
+    await setup_integration(hass, mock_config_entry)
+
+    # Called once during setup and once during update
+    assert mock_ayla_api.async_sign_in.call_count == 2
+
+
+async def test_token_expiring_soon(
+    hass: HomeAssistant,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Make sure async_refresh_auth is called if the token is expiring soon."""
+    mock_ayla_api.token_expiring_soon = True
+    await setup_integration(hass, mock_config_entry)
+
+    mock_ayla_api.async_refresh_auth.assert_called_once()
+
+
+@pytest.mark.parametrize("exception", [AylaAuthError, TimeoutError])
+async def test_startup_exception(
+    hass: HomeAssistant,
+    mock_ayla_api: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+    exception: Exception,
+) -> None:
+    """Make sure that no devices are added if there was an exception while logging in."""
+    mock_ayla_api.async_sign_in.side_effect = exception
+    await setup_integration(hass, mock_config_entry)
+
+ assert len(hass.states.async_all()) == 0 + + +async def test_one_device_disabled( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, + mock_devices: list[AsyncMock], + mock_ayla_api: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that coordinator only updates devices that are currently listening.""" + await setup_integration(hass, mock_config_entry) + + for d in mock_devices: + d.async_update.assert_called_once() + d.reset_mock() + + entity = entity_registry.async_get( + entity_registry.async_get_entity_id( + Platform.CLIMATE, DOMAIN, mock_devices[0].device_serial_number + ) + ) + entity_registry.async_update_entity( + entity.entity_id, disabled_by=er.RegistryEntryDisabler.USER + ) + await hass.async_block_till_done() + freezer.tick(API_REFRESH) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert len(hass.states.async_all()) == len(mock_devices) - 1 + mock_devices[0].async_update.assert_not_called() + mock_devices[1].async_update.assert_called_once() diff --git a/tests/components/fully_kiosk/test_number.py b/tests/components/fully_kiosk/test_number.py index 2fbbf751725..5f74002f8cd 100644 --- a/tests/components/fully_kiosk/test_number.py +++ b/tests/components/fully_kiosk/test_number.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock from homeassistant.components import number from homeassistant.components.fully_kiosk.const import DOMAIN, UPDATE_INTERVAL from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.util import dt as dt_util @@ -81,9 +81,11 @@ async def test_numbers( assert device_entry.sw_version == "1.42.5" -def set_value(hass, entity_id, value): +async def set_value( + hass: HomeAssistant, entity_id: str, value: float +) -> ServiceResponse: """Set the value of a number entity.""" - return hass.services.async_call( + return await hass.services.async_call( number.DOMAIN, "set_value", {ATTR_ENTITY_ID: entity_id, number.ATTR_VALUE: value}, diff --git a/tests/components/fully_kiosk/test_switch.py b/tests/components/fully_kiosk/test_switch.py index 5b3b5e651b0..14a464e0dcd 100644 --- a/tests/components/fully_kiosk/test_switch.py +++ b/tests/components/fully_kiosk/test_switch.py @@ -5,7 +5,7 @@ from unittest.mock import MagicMock from homeassistant.components import switch from homeassistant.components.fully_kiosk.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import MockConfigEntry, async_fire_mqtt_message @@ -149,8 +149,10 @@ def has_subscribed(mqtt_mock: MqttMockHAClient, topic: str) -> bool: return False -def call_service(hass, service, entity_id): +async def call_service( + hass: HomeAssistant, service: str, entity_id: str +) -> ServiceResponse: """Call any service on entity.""" - return hass.services.async_call( + return await hass.services.async_call( switch.DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True ) diff --git a/tests/components/fyta/conftest.py b/tests/components/fyta/conftest.py index 6a67ae75ec2..2bcad9b3c80 100644 --- a/tests/components/fyta/conftest.py +++ b/tests/components/fyta/conftest.py @@ -4,6 +4,7 @@ from 
collections.abc import Generator from datetime import UTC, datetime from unittest.mock import AsyncMock, patch +from fyta_cli.fyta_models import Credentials, Plant import pytest from homeassistant.components.fyta.const import CONF_EXPIRATION, DOMAIN as FYTA_DOMAIN @@ -35,23 +36,27 @@ def mock_config_entry() -> MockConfigEntry: def mock_fyta_connector(): """Build a fixture for the Fyta API that connects successfully and returns one device.""" + plants: dict[int, Plant] = { + 0: Plant.from_dict(load_json_object_fixture("plant_status1.json", FYTA_DOMAIN)), + 1: Plant.from_dict(load_json_object_fixture("plant_status2.json", FYTA_DOMAIN)), + } + mock_fyta_connector = AsyncMock() mock_fyta_connector.expiration = datetime.fromisoformat(EXPIRATION).replace( tzinfo=UTC ) mock_fyta_connector.client = AsyncMock(autospec=True) - mock_fyta_connector.update_all_plants.return_value = load_json_object_fixture( - "plant_status.json", FYTA_DOMAIN - ) - mock_fyta_connector.plant_list = load_json_object_fixture( - "plant_list.json", FYTA_DOMAIN - ) + mock_fyta_connector.update_all_plants.return_value = plants + mock_fyta_connector.plant_list = { + 0: "Gummibaum", + 1: "Kakaobaum", + } mock_fyta_connector.login = AsyncMock( - return_value={ - CONF_ACCESS_TOKEN: ACCESS_TOKEN, - CONF_EXPIRATION: datetime.fromisoformat(EXPIRATION).replace(tzinfo=UTC), - } + return_value=Credentials( + access_token=ACCESS_TOKEN, + expiration=datetime.fromisoformat(EXPIRATION).replace(tzinfo=UTC), + ) ) with ( patch( diff --git a/tests/components/fyta/fixtures/plant_list.json b/tests/components/fyta/fixtures/plant_list.json deleted file mode 100644 index 9527c7d9d96..00000000000 --- a/tests/components/fyta/fixtures/plant_list.json +++ /dev/null @@ -1,4 +0,0 @@ -{ - "0": "Gummibaum", - "1": "Kakaobaum" -} diff --git a/tests/components/fyta/fixtures/plant_status.json b/tests/components/fyta/fixtures/plant_status.json deleted file mode 100644 index 5d9cb2d31d9..00000000000 --- a/tests/components/fyta/fixtures/plant_status.json +++ /dev/null @@ -1,14 +0,0 @@ -{ - "0": { - "name": "Gummibaum", - "scientific_name": "Ficus elastica", - "status": 1, - "sw_version": "1.0" - }, - "1": { - "name": "Kakaobaum", - "scientific_name": "Theobroma cacao", - "status": 2, - "sw_version": "1.0" - } -} diff --git a/tests/components/fyta/fixtures/plant_status1.json b/tests/components/fyta/fixtures/plant_status1.json new file mode 100644 index 00000000000..f2e8dc9c970 --- /dev/null +++ b/tests/components/fyta/fixtures/plant_status1.json @@ -0,0 +1,23 @@ +{ + "battery_level": 80, + "battery_status": true, + "last_updated": "2023-01-10 10:10:00", + "light": 2, + "light_status": 3, + "nickname": "Gummibaum", + "moisture": 61, + "moisture_status": 3, + "sensor_available": true, + "sw_version": "1.0", + "status": 3, + "online": true, + "ph": null, + "plant_id": 0, + "plant_origin_path": "", + "plant_thumb_path": "", + "salinity": 1, + "salinity_status": 4, + "scientific_name": "Ficus elastica", + "temperature": 25.2, + "temperature_status": 3 +} diff --git a/tests/components/fyta/fixtures/plant_status2.json b/tests/components/fyta/fixtures/plant_status2.json new file mode 100644 index 00000000000..a5c2735ca7c --- /dev/null +++ b/tests/components/fyta/fixtures/plant_status2.json @@ -0,0 +1,23 @@ +{ + "battery_level": 80, + "battery_status": true, + "last_updated": "2023-01-02 10:10:00", + "light": 2, + "light_status": 3, + "nickname": "Kakaobaum", + "moisture": 61, + "moisture_status": 3, + "sensor_available": true, + "sw_version": "1.0", + "status": 3, + 
"online": true, + "ph": 7, + "plant_id": 0, + "plant_origin_path": "", + "plant_thumb_path": "", + "salinity": 1, + "salinity_status": 4, + "scientific_name": "Theobroma cacao", + "temperature": 25.2, + "temperature_status": 3 +} diff --git a/tests/components/fyta/snapshots/test_diagnostics.ambr b/tests/components/fyta/snapshots/test_diagnostics.ambr index 7491310129b..cf6bcdb77ad 100644 --- a/tests/components/fyta/snapshots/test_diagnostics.ambr +++ b/tests/components/fyta/snapshots/test_diagnostics.ambr @@ -23,16 +23,50 @@ }), 'plant_data': dict({ '0': dict({ + 'battery_level': 80.0, + 'battery_status': True, + 'last_updated': '2023-01-10T10:10:00', + 'light': 2.0, + 'light_status': 3, + 'moisture': 61.0, + 'moisture_status': 3, 'name': 'Gummibaum', + 'online': True, + 'ph': None, + 'plant_id': 0, + 'plant_origin_path': '', + 'plant_thumb_path': '', + 'salinity': 1.0, + 'salinity_status': 4, 'scientific_name': 'Ficus elastica', - 'status': 1, + 'sensor_available': True, + 'status': 3, 'sw_version': '1.0', + 'temperature': 25.2, + 'temperature_status': 3, }), '1': dict({ + 'battery_level': 80.0, + 'battery_status': True, + 'last_updated': '2023-01-02T10:10:00', + 'light': 2.0, + 'light_status': 3, + 'moisture': 61.0, + 'moisture_status': 3, 'name': 'Kakaobaum', + 'online': True, + 'ph': 7.0, + 'plant_id': 0, + 'plant_origin_path': '', + 'plant_thumb_path': '', + 'salinity': 1.0, + 'salinity_status': 4, 'scientific_name': 'Theobroma cacao', - 'status': 2, + 'sensor_available': True, + 'status': 3, 'sw_version': '1.0', + 'temperature': 25.2, + 'temperature_status': 3, }), }), }) diff --git a/tests/components/fyta/snapshots/test_sensor.ambr b/tests/components/fyta/snapshots/test_sensor.ambr index 1041fff501e..2e96de0a283 100644 --- a/tests/components/fyta/snapshots/test_sensor.ambr +++ b/tests/components/fyta/snapshots/test_sensor.ambr @@ -1,4 +1,334 @@ # serializer version: 1 +# name: test_all_entities[sensor.gummibaum_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.gummibaum_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.gummibaum_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Gummibaum Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gummibaum_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 
'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-light', + 'unit_of_measurement': 'μmol/s⋅m²', + }) +# --- +# name: test_all_entities[sensor.gummibaum_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Gummibaum Light', + 'state_class': , + 'unit_of_measurement': 'μmol/s⋅m²', + }), + 'context': , + 'entity_id': 'sensor.gummibaum_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_light_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_light_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Light state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-light_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_light_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Light state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_light_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_moisture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-moisture', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Gummibaum Moisture', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.gummibaum_moisture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '61.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 
'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_moisture_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'moisture_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-moisture_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_moisture_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Moisture state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_moisture_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.gummibaum_ph-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_ph', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'pH', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-ph', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_ph-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'ph', + 'friendly_name': 'Gummibaum pH', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.gummibaum_ph', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- # name: test_all_entities[sensor.gummibaum_plant_state-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -56,7 +386,122 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'doing_great', + 'state': 'no_sensor', + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_salinity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-salinity', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'conductivity', + 'friendly_name': 'Gummibaum Salinity', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gummibaum_salinity', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_salinity_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-salinity_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_salinity_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Salinity state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_salinity_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'high', }) # --- # name: test_all_entities[sensor.gummibaum_scientific_name-entry] @@ -105,6 +550,451 @@ 'state': 'Ficus elastica', }) # --- +# name: test_all_entities[sensor.gummibaum_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.gummibaum_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Gummibaum Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.gummibaum_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.2', + }) +# --- +# name: test_all_entities[sensor.gummibaum_temperature_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.gummibaum_temperature_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'temperature_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-0-temperature_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.gummibaum_temperature_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Gummibaum Temperature state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.gummibaum_temperature_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_battery-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.kakaobaum_battery', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Battery', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-battery_level', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_battery-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'battery', + 'friendly_name': 'Kakaobaum Battery', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_battery', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '80.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-light', + 'unit_of_measurement': 'μmol/s⋅m²', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Kakaobaum Light', + 'state_class': , + 'unit_of_measurement': 'μmol/s⋅m²', + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_light_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': 
, + 'original_icon': None, + 'original_name': 'Light state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-light_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_light_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Light state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_light_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_moisture', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-moisture', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'moisture', + 'friendly_name': 'Kakaobaum Moisture', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_moisture', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '61.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_moisture_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Moisture state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'moisture_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-moisture_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_moisture_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Moisture state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_moisture_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_ph-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, 
+ 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_ph', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'pH', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-ph', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_ph-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'ph', + 'friendly_name': 'Kakaobaum pH', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_ph', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.0', + }) +# --- # name: test_all_entities[sensor.kakaobaum_plant_state-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -162,7 +1052,122 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'need_attention', + 'state': 'no_sensor', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_salinity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'conductivity', + 'friendly_name': 'Kakaobaum Salinity', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_salinity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.0', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_salinity_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Salinity state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'salinity_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-salinity_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_salinity_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Salinity state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 
'sensor.kakaobaum_salinity_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'high', }) # --- # name: test_all_entities[sensor.kakaobaum_scientific_name-entry] @@ -211,3 +1216,118 @@ 'state': 'Theobroma cacao', }) # --- +# name: test_all_entities[sensor.kakaobaum_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.kakaobaum_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Kakaobaum Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.2', + }) +# --- +# name: test_all_entities[sensor.kakaobaum_temperature_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.kakaobaum_temperature_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature state', + 'platform': 'fyta', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'temperature_status', + 'unique_id': 'ce5f5431554d101905d31797e1232da8-1-temperature_status', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.kakaobaum_temperature_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Kakaobaum Temperature state', + 'options': list([ + 'no_data', + 'too_low', + 'low', + 'perfect', + 'high', + 'too_high', + ]), + }), + 'context': , + 'entity_id': 'sensor.kakaobaum_temperature_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'perfect', + }) +# --- diff --git a/tests/components/fyta/test_config_flow.py b/tests/components/fyta/test_config_flow.py index df0626d0af0..e47b78aa893 100644 --- a/tests/components/fyta/test_config_flow.py +++ b/tests/components/fyta/test_config_flow.py @@ -158,11 +158,7 @@ async def test_reauth( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/generic_hygrostat/test_humidifier.py 
b/tests/components/generic_hygrostat/test_humidifier.py index fc46db48664..2beaf423201 100644 --- a/tests/components/generic_hygrostat/test_humidifier.py +++ b/tests/components/generic_hygrostat/test_humidifier.py @@ -32,10 +32,12 @@ from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, HomeAssistant, + ServiceCall, State, callback, ) from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.typing import StateType from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -206,7 +208,7 @@ async def test_unique_id( assert entry.unique_id == unique_id -def _setup_sensor(hass, humidity): +def _setup_sensor(hass: HomeAssistant, humidity: StateType) -> None: """Set up the test sensor.""" hass.states.async_set(ENT_SENSOR, humidity) @@ -669,13 +671,13 @@ async def test_operation_mode_humidify(hass: HomeAssistant) -> None: assert call.data["entity_id"] == ENT_SWITCH -async def _setup_switch(hass, is_on): +async def _setup_switch(hass: HomeAssistant, is_on: bool) -> list[ServiceCall]: """Set up the test switch.""" hass.states.async_set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF) calls = [] @callback - def log_call(call): + def log_call(call: ServiceCall) -> None: """Log service calls.""" calls.append(call) @@ -1615,7 +1617,7 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: assert state.state == STATE_OFF -async def _setup_humidifier(hass): +async def _setup_humidifier(hass: HomeAssistant) -> None: assert await async_setup_component( hass, DOMAIN, @@ -1635,7 +1637,9 @@ async def _setup_humidifier(hass): await hass.async_block_till_done() -def _mock_restore_cache(hass, humidity=40, state=STATE_OFF): +def _mock_restore_cache( + hass: HomeAssistant, humidity: int = 40, state: str = STATE_OFF +) -> None: mock_restore_cache( hass, ( diff --git a/tests/components/generic_thermostat/test_climate.py b/tests/components/generic_thermostat/test_climate.py index 0f438056fbd..f1c41270a2f 100644 --- a/tests/components/generic_thermostat/test_climate.py +++ b/tests/components/generic_thermostat/test_climate.py @@ -40,11 +40,13 @@ from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, CoreState, HomeAssistant, + ServiceCall, State, callback, ) from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.typing import StateType from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM @@ -208,7 +210,7 @@ async def test_unique_id( assert entry.unique_id == unique_id -def _setup_sensor(hass, temp): +def _setup_sensor(hass: HomeAssistant, temp: StateType) -> None: """Set up the test sensor.""" hass.states.async_set(ENT_SENSOR, temp) @@ -594,13 +596,13 @@ async def test_hvac_mode_heat(hass: HomeAssistant) -> None: assert call.data["entity_id"] == ENT_SWITCH -def _setup_switch(hass, is_on): +def _setup_switch(hass: HomeAssistant, is_on: bool) -> list[ServiceCall]: """Set up the test switch.""" hass.states.async_set(ENT_SWITCH, STATE_ON if is_on else STATE_OFF) calls = [] @callback - def log_call(call): + def log_call(call: ServiceCall) -> None: """Log service calls.""" calls.append(call) @@ -1374,7 +1376,7 @@ async def test_restore_state_uncoherence_case(hass: HomeAssistant) -> None: assert state.state == HVACMode.OFF -async def _setup_climate(hass): 
+async def _setup_climate(hass: HomeAssistant) -> None: assert await async_setup_component( hass, DOMAIN, @@ -1393,7 +1395,9 @@ async def _setup_climate(hass): ) -def _mock_restore_cache(hass, temperature=20, hvac_mode=HVACMode.OFF): +def _mock_restore_cache( + hass: HomeAssistant, temperature: int = 20, hvac_mode: HVACMode = HVACMode.OFF +) -> None: mock_restore_cache( hass, ( diff --git a/tests/components/geocaching/test_config_flow.py b/tests/components/geocaching/test_config_flow.py index 0c2ce66b513..5db89de0868 100644 --- a/tests/components/geocaching/test_config_flow.py +++ b/tests/components/geocaching/test_config_flow.py @@ -14,7 +14,7 @@ from homeassistant.components.geocaching.const import ( ENVIRONMENT, ENVIRONMENT_URLS, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -195,9 +195,7 @@ async def test_reauthentication( """Test Geocaching reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH} - ) + result = await mock_config_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/geonetnz_volcano/test_config_flow.py b/tests/components/geonetnz_volcano/test_config_flow.py index b074bdffa20..110fb3b0a9e 100644 --- a/tests/components/geonetnz_volcano/test_config_flow.py +++ b/tests/components/geonetnz_volcano/test_config_flow.py @@ -3,7 +3,8 @@ from datetime import timedelta from unittest.mock import patch -from homeassistant.components.geonetnz_volcano import config_flow +from homeassistant.components.geonetnz_volcano import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, @@ -20,19 +21,18 @@ async def test_duplicate_error(hass: HomeAssistant, config_entry) -> None: conf = {CONF_LATITUDE: -41.2, CONF_LONGITUDE: 174.7, CONF_RADIUS: 25} config_entry.add_to_hass(hass) - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - result = await flow.async_step_user(user_input=conf) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf + ) assert result["errors"] == {"base": "already_configured"} async def test_show_form(hass: HomeAssistant) -> None: """Test that the form is served with no input.""" - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - - result = await flow.async_step_user(user_input=None) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=None + ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -48,9 +48,6 @@ async def test_step_import(hass: HomeAssistant) -> None: CONF_SCAN_INTERVAL: timedelta(minutes=4), } - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - with ( patch( "homeassistant.components.geonetnz_volcano.async_setup_entry", @@ -60,7 +57,9 @@ async def test_step_import(hass: HomeAssistant) -> None: "homeassistant.components.geonetnz_volcano.async_setup", return_value=True ), ): - result = await flow.async_step_import(import_config=conf) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT}, data=conf + ) assert result["type"] is 
FlowResultType.CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { @@ -78,9 +77,6 @@ async def test_step_user(hass: HomeAssistant) -> None: hass.config.longitude = 174.7 conf = {CONF_RADIUS: 25} - flow = config_flow.GeonetnzVolcanoFlowHandler() - flow.hass = hass - with ( patch( "homeassistant.components.geonetnz_volcano.async_setup_entry", @@ -90,7 +86,9 @@ async def test_step_user(hass: HomeAssistant) -> None: "homeassistant.components.geonetnz_volcano.async_setup", return_value=True ), ): - result = await flow.async_step_user(user_input=conf) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER}, data=conf + ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "-41.2, 174.7" assert result["data"] == { diff --git a/tests/components/glances/test_config_flow.py b/tests/components/glances/test_config_flow.py index a7d6934e32d..0fabc387a4f 100644 --- a/tests/components/glances/test_config_flow.py +++ b/tests/components/glances/test_config_flow.py @@ -89,15 +89,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=glances.DOMAIN, data=MOCK_USER_INPUT) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - glances.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_USER_INPUT, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["description_placeholders"] == {"username": "username"} @@ -128,15 +120,7 @@ async def test_reauth_fails( entry.add_to_hass(hass) mock_api.return_value.get_ha_sensor_data.side_effect = [error, HA_SENSOR_DATA] - result = await hass.config_entries.flow.async_init( - glances.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_USER_INPUT, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["description_placeholders"] == {"username": "username"} diff --git a/tests/components/google/test_calendar.py b/tests/components/google/test_calendar.py index 903b68a5cf2..11d4ec46bd1 100644 --- a/tests/components/google/test_calendar.py +++ b/tests/components/google/test_calendar.py @@ -74,7 +74,7 @@ def upcoming_event_url(entity: str = TEST_ENTITY) -> str: class Client: """Test client with helper methods for calendar websocket.""" - def __init__(self, client): + def __init__(self, client) -> None: """Initialize Client.""" self.client = client self.id = 0 diff --git a/tests/components/google/test_config_flow.py b/tests/components/google/test_config_flow.py index 47156299b57..b7962921ffd 100644 --- a/tests/components/google/test_config_flow.py +++ b/tests/components/google/test_config_flow.py @@ -130,7 +130,7 @@ async def primary_calendar( ) -async def fire_alarm(hass, point_in_time): +async def fire_alarm(hass: HomeAssistant, point_in_time: datetime.datetime) -> None: """Fire an alarm and wait for callbacks to run.""" with freeze_time(point_in_time): async_fire_time_changed(hass, point_in_time) @@ -497,14 +497,7 @@ async def test_reauth_flow( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + 
result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -761,14 +754,7 @@ async def test_web_reauth_flow( entries = hass.config_entries.async_entries(DOMAIN) assert len(entries) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/google_generative_ai_conversation/test_conversation.py b/tests/components/google_generative_ai_conversation/test_conversation.py index a8eae34e08b..1ea5c2ad9b8 100644 --- a/tests/components/google_generative_ai_conversation/test_conversation.py +++ b/tests/components/google_generative_ai_conversation/test_conversation.py @@ -1,5 +1,6 @@ """Tests for the Google Generative AI Conversation integration conversation platform.""" +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from freezegun import freeze_time @@ -215,7 +216,9 @@ async def test_function_call( }, ) - def tool_call(hass, tool_input, tool_context): + def tool_call( + hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext + ) -> dict[str, Any]: mock_part.function_call = None mock_part.text = "Hi there!" return {"result": "Test response"} @@ -314,7 +317,9 @@ async def test_function_call_without_parameters( mock_part = MagicMock() mock_part.function_call = FunctionCall(name="test_tool", args={}) - def tool_call(hass, tool_input, tool_context): + def tool_call( + hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext + ) -> dict[str, Any]: mock_part.function_call = None mock_part.text = "Hi there!" return {"result": "Test response"} @@ -400,7 +405,9 @@ async def test_function_exception( mock_part = MagicMock() mock_part.function_call = FunctionCall(name="test_tool", args={"param1": 1}) - def tool_call(hass, tool_input, tool_context): + def tool_call( + hass: HomeAssistant, tool_input: llm.ToolInput, tool_context: llm.LLMContext + ) -> dict[str, Any]: mock_part.function_call = None mock_part.text = "Hi there!" 
raise HomeAssistantError("Test tool exception") diff --git a/tests/components/google_pubsub/test_init.py b/tests/components/google_pubsub/test_init.py index fba561f6df1..97e499d5d6d 100644 --- a/tests/components/google_pubsub/test_init.py +++ b/tests/components/google_pubsub/test_init.py @@ -4,6 +4,7 @@ from collections.abc import Generator from dataclasses import dataclass from datetime import datetime import os +from typing import Any from unittest.mock import MagicMock, Mock, patch import pytest @@ -111,7 +112,7 @@ async def test_full_config(hass: HomeAssistant, mock_client) -> None: ) -async def _setup(hass, filter_config): +async def _setup(hass: HomeAssistant, filter_config: dict[str, Any]) -> None: """Shared set up for filtering tests.""" config = { google_pubsub.DOMAIN: { diff --git a/tests/components/google_tasks/snapshots/test_todo.ambr b/tests/components/google_tasks/snapshots/test_todo.ambr index af8dec6a182..76611ba4a31 100644 --- a/tests/components/google_tasks/snapshots/test_todo.ambr +++ b/tests/components/google_tasks/snapshots/test_todo.ambr @@ -79,9 +79,6 @@ }), ]) # --- -# name: test_move_todo_item[api_responses0].4 - None -# --- # name: test_parent_child_ordering[api_responses0] list([ dict({ diff --git a/tests/components/google_translate/test_tts.py b/tests/components/google_translate/test_tts.py index 41cecd8cd98..1f199a5db97 100644 --- a/tests/components/google_translate/test_tts.py +++ b/tests/components/google_translate/test_tts.py @@ -30,9 +30,8 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture(autouse=True) @@ -104,7 +103,7 @@ async def mock_config_entry_setup(hass: HomeAssistant, config: dict[str, Any]) - "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -161,7 +160,7 @@ async def test_tts_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_de_com", + ATTR_ENTITY_ID: "tts.google_translate_de_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -217,7 +216,7 @@ async def test_service_say_german_config( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_LANGUAGE: "de", @@ -274,7 +273,7 @@ async def test_service_say_german_service( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_co_uk", + ATTR_ENTITY_ID: "tts.google_translate_en_co_uk", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, @@ -330,7 +329,7 @@ async def test_service_say_en_uk_config( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_LANGUAGE: "en-uk", @@ -387,7 +386,7 @@ async def test_service_say_en_uk_service( "mock_config_entry_setup", 
"speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", tts.ATTR_OPTIONS: {"tld": "co.uk"}, @@ -444,7 +443,7 @@ async def test_service_say_en_couk( "mock_config_entry_setup", "speak", { - ATTR_ENTITY_ID: "tts.google_en_com", + ATTR_ENTITY_ID: "tts.google_translate_en_com", tts.ATTR_MEDIA_PLAYER_ENTITY_ID: "media_player.something", tts.ATTR_MESSAGE: "There is a person at the front door.", }, diff --git a/tests/components/google_wifi/test_sensor.py b/tests/components/google_wifi/test_sensor.py index c7df2b4e822..af870a2136d 100644 --- a/tests/components/google_wifi/test_sensor.py +++ b/tests/components/google_wifi/test_sensor.py @@ -2,6 +2,7 @@ from datetime import datetime, timedelta from http import HTTPStatus +from typing import Any from unittest.mock import Mock, patch import requests_mock @@ -78,7 +79,9 @@ async def test_setup_get( assert_setup_component(6, "sensor") -def setup_api(hass, data, requests_mock): +def setup_api( + hass: HomeAssistant | None, data: str | None, requests_mock: requests_mock.Mocker +) -> tuple[google_wifi.GoogleWifiAPI, dict[str, Any]]: """Set up API with fake data.""" resource = f"http://localhost{google_wifi.ENDPOINT}" now = datetime(1970, month=1, day=1) @@ -101,7 +104,7 @@ def setup_api(hass, data, requests_mock): return api, sensor_dict -def fake_delay(hass, ha_delay): +def fake_delay(hass: HomeAssistant, ha_delay: int) -> None: """Fake delay to prevent update throttle.""" hass_now = dt_util.utcnow() shifted_time = hass_now + timedelta(seconds=ha_delay) @@ -220,7 +223,9 @@ def test_update_when_unavailable( assert sensor.state is None -def update_side_effect(hass, requests_mock): +def update_side_effect( + hass: HomeAssistant, requests_mock: requests_mock.Mocker +) -> None: """Mock representation of update function.""" api, sensor_dict = setup_api(hass, MOCK_DATA, requests_mock) api.data = None diff --git a/tests/components/gpsd/test_config_flow.py b/tests/components/gpsd/test_config_flow.py index 2d68a704119..4d832e120e4 100644 --- a/tests/components/gpsd/test_config_flow.py +++ b/tests/components/gpsd/test_config_flow.py @@ -6,7 +6,7 @@ from gps3.agps3threaded import GPSD_PORT as DEFAULT_PORT from homeassistant import config_entries from homeassistant.components.gpsd.const import DOMAIN -from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT +from homeassistant.const import CONF_HOST, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -52,23 +52,3 @@ async def test_connection_error(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" - - -async def test_import(hass: HomeAssistant) -> None: - """Test import step.""" - with patch("homeassistant.components.gpsd.config_flow.socket") as mock_socket: - mock_connect = mock_socket.return_value.connect - mock_connect.return_value = None - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={CONF_HOST: HOST, CONF_PORT: 1234, CONF_NAME: "MyGPS"}, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "MyGPS" - assert result["data"] == { - CONF_HOST: HOST, - CONF_NAME: "MyGPS", - CONF_PORT: 1234, - } diff --git a/tests/components/group/common.py b/tests/components/group/common.py index 86fe537a776..a9b6356418c 100644 --- 
a/tests/components/group/common.py +++ b/tests/components/group/common.py @@ -13,32 +13,32 @@ from homeassistant.components.group import ( SERVICE_SET, ) from homeassistant.const import ATTR_ICON, ATTR_NAME, SERVICE_RELOAD -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.loader import bind_hass @bind_hass -def reload(hass): +def reload(hass: HomeAssistant) -> None: """Reload the automation from config.""" hass.add_job(async_reload, hass) @callback @bind_hass -def async_reload(hass): +def async_reload(hass: HomeAssistant) -> None: """Reload the automation from config.""" hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_RELOAD)) @bind_hass def set_group( - hass, - object_id, - name=None, - entity_ids=None, - icon=None, - add=None, -): + hass: HomeAssistant, + object_id: str, + name: str | None = None, + entity_ids: list[str] | None = None, + icon: str | None = None, + add: list[str] | None = None, +) -> None: """Create/Update a group.""" hass.add_job( async_set_group, @@ -54,13 +54,13 @@ def set_group( @callback @bind_hass def async_set_group( - hass, - object_id, - name=None, - entity_ids=None, - icon=None, - add=None, -): + hass: HomeAssistant, + object_id: str, + name: str | None = None, + entity_ids: list[str] | None = None, + icon: str | None = None, + add: list[str] | None = None, +) -> None: """Create/Update a group.""" data = { key: value @@ -79,7 +79,7 @@ def async_set_group( @callback @bind_hass -def async_remove(hass, object_id): +def async_remove(hass: HomeAssistant, object_id: str) -> None: """Remove a user group.""" data = {ATTR_OBJECT_ID: object_id} hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_REMOVE, data)) diff --git a/tests/components/hardware/test_websocket_api.py b/tests/components/hardware/test_websocket_api.py index e8099069a9c..1379bdba120 100644 --- a/tests/components/hardware/test_websocket_api.py +++ b/tests/components/hardware/test_websocket_api.py @@ -61,7 +61,7 @@ async def test_system_status_subscription( response = await client.receive_json() assert response["success"] - VirtualMem = namedtuple("VirtualMemory", ["available", "percent", "total"]) + VirtualMem = namedtuple("VirtualMemory", ["available", "percent", "total"]) # noqa: PYI024 vmem = VirtualMem(10 * 1024**2, 50, 30 * 1024**2) with ( diff --git a/tests/components/harmony/test_remote.py b/tests/components/harmony/test_remote.py index c0ec2235b84..8f488f9bf0d 100644 --- a/tests/components/harmony/test_remote.py +++ b/tests/components/harmony/test_remote.py @@ -1,6 +1,7 @@ """Test the Logitech Harmony Hub remote.""" from datetime import timedelta +from typing import Any from aioharmony.const import SendCommandDevice @@ -387,7 +388,9 @@ async def test_sync( mock_write_config.assert_called() -async def _send_commands_and_wait(hass, service_data): +async def _send_commands_and_wait( + hass: HomeAssistant, service_data: dict[str, Any] +) -> None: await hass.services.async_call( REMOTE_DOMAIN, SERVICE_SEND_COMMAND, diff --git a/tests/components/harmony/test_select.py b/tests/components/harmony/test_select.py index 2568feb1412..1451f146b98 100644 --- a/tests/components/harmony/test_select.py +++ b/tests/components/harmony/test_select.py @@ -91,7 +91,9 @@ async def test_select_option( assert hass.states.is_state(ENTITY_SELECT, "power_off") -async def _select_option_and_wait(hass, entity, option): +async def _select_option_and_wait( + hass: HomeAssistant, entity: str, option: str +) -> None: await 
hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, diff --git a/tests/components/hassio/common.py b/tests/components/hassio/common.py new file mode 100644 index 00000000000..630368a0a7a --- /dev/null +++ b/tests/components/hassio/common.py @@ -0,0 +1,234 @@ +"""Provide common test tools for hassio.""" + +from __future__ import annotations + +from collections.abc import Generator +import logging +from typing import Any +from unittest.mock import DEFAULT, AsyncMock, patch + +from homeassistant.components.hassio.addon_manager import AddonManager +from homeassistant.core import HomeAssistant + +LOGGER = logging.getLogger(__name__) + + +def mock_addon_manager(hass: HomeAssistant) -> AddonManager: + """Return an AddonManager instance.""" + return AddonManager(hass, LOGGER, "Test", "test_addon") + + +def mock_discovery_info() -> Any: + """Return the discovery info from the supervisor.""" + return DEFAULT + + +def mock_get_addon_discovery_info( + discovery_info: dict[str, Any], discovery_info_side_effect: Any | None +) -> Generator[AsyncMock]: + """Mock get add-on discovery info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", + side_effect=discovery_info_side_effect, + return_value=discovery_info, + ) as get_addon_discovery_info: + yield get_addon_discovery_info + + +def mock_addon_store_info( + addon_store_info_side_effect: Any | None, +) -> Generator[AsyncMock]: + """Mock Supervisor add-on store info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_store_info", + side_effect=addon_store_info_side_effect, + ) as addon_store_info: + addon_store_info.return_value = { + "available": True, + "installed": None, + "state": None, + "version": "1.0.0", + } + yield addon_store_info + + +def mock_addon_info(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: + """Mock Supervisor add-on info.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_get_addon_info", + side_effect=addon_info_side_effect, + ) as addon_info: + addon_info.return_value = { + "available": False, + "hostname": None, + "options": {}, + "state": None, + "update_available": False, + "version": None, + } + yield addon_info + + +def mock_addon_not_installed( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on not installed.""" + addon_store_info.return_value["available"] = True + return addon_info + + +def mock_addon_installed( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> AsyncMock: + """Mock add-on already installed but not running.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["hostname"] = "core-test-addon" + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> AsyncMock: + """Mock add-on already running.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["hostname"] = "core-test-addon" + addon_info.return_value["state"] = "started" + addon_info.return_value["version"] = "1.0.0" + return addon_info + + +def mock_install_addon_side_effect( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: + """Return 
the install add-on side effect.""" + + async def install_addon(hass: HomeAssistant, slug): + """Mock install add-on.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "stopped", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["state"] = "stopped" + addon_info.return_value["version"] = "1.0.0" + + return install_addon + + +def mock_install_addon(install_addon_side_effect: Any | None) -> Generator[AsyncMock]: + """Mock install add-on.""" + + with patch( + "homeassistant.components.hassio.addon_manager.async_install_addon", + side_effect=install_addon_side_effect, + ) as install_addon: + yield install_addon + + +def mock_start_addon_side_effect( + addon_store_info: AsyncMock, addon_info: AsyncMock +) -> Any | None: + """Return the start add-on options side effect.""" + + async def start_addon(hass: HomeAssistant, slug): + """Mock start add-on.""" + addon_store_info.return_value = { + "available": True, + "installed": "1.0.0", + "state": "started", + "version": "1.0.0", + } + addon_info.return_value["available"] = True + addon_info.return_value["state"] = "started" + + return start_addon + + +def mock_start_addon(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: + """Mock start add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_start_addon", + side_effect=start_addon_side_effect, + ) as start_addon: + yield start_addon + + +def mock_stop_addon() -> Generator[AsyncMock]: + """Mock stop add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_stop_addon" + ) as stop_addon: + yield stop_addon + + +def mock_restart_addon(restart_addon_side_effect: Any | None) -> Generator[AsyncMock]: + """Mock restart add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_restart_addon", + side_effect=restart_addon_side_effect, + ) as restart_addon: + yield restart_addon + + +def mock_uninstall_addon() -> Generator[AsyncMock]: + """Mock uninstall add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_uninstall_addon" + ) as uninstall_addon: + yield uninstall_addon + + +def mock_addon_options(addon_info: AsyncMock) -> dict[str, Any]: + """Mock add-on options.""" + return addon_info.return_value["options"] + + +def mock_set_addon_options_side_effect(addon_options: dict[str, Any]) -> Any | None: + """Return the set add-on options side effect.""" + + async def set_addon_options(hass: HomeAssistant, slug: str, options: dict) -> None: + """Mock set add-on options.""" + addon_options.update(options["options"]) + + return set_addon_options + + +def mock_set_addon_options( + set_addon_options_side_effect: Any | None, +) -> Generator[AsyncMock]: + """Mock set add-on options.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_set_addon_options", + side_effect=set_addon_options_side_effect, + ) as set_options: + yield set_options + + +def mock_create_backup() -> Generator[AsyncMock]: + """Mock create backup.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_create_backup" + ) as create_backup: + yield create_backup + + +def mock_update_addon() -> Generator[AsyncMock]: + """Mock update add-on.""" + with patch( + "homeassistant.components.hassio.addon_manager.async_update_addon" + ) as update_addon: + yield update_addon diff --git a/tests/components/hassio/test_addon_manager.py b/tests/components/hassio/test_addon_manager.py index 6a20c6eec88..4cb57e5b8d8 100644 --- 
a/tests/components/hassio/test_addon_manager.py +++ b/tests/components/hassio/test_addon_manager.py @@ -3,10 +3,8 @@ from __future__ import annotations import asyncio -from collections.abc import Generator -import logging from typing import Any -from unittest.mock import AsyncMock, call, patch +from unittest.mock import AsyncMock, call import pytest @@ -19,154 +17,6 @@ from homeassistant.components.hassio.addon_manager import ( from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.core import HomeAssistant -LOGGER = logging.getLogger(__name__) - - -@pytest.fixture(name="addon_manager") -def addon_manager_fixture(hass: HomeAssistant) -> AddonManager: - """Return an AddonManager instance.""" - return AddonManager(hass, LOGGER, "Test", "test_addon") - - -@pytest.fixture(name="addon_not_installed") -def addon_not_installed_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on not installed.""" - addon_store_info.return_value["available"] = True - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-test-addon" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="get_addon_discovery_info") -def get_addon_discovery_info_fixture() -> Generator[AsyncMock]: - """Mock get add-on discovery info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info" - ) as get_addon_discovery_info: - yield get_addon_discovery_info - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture() -> Generator[AsyncMock]: - """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": False, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture() -> Generator[AsyncMock]: - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": False, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="set_addon_options") -def set_addon_options_fixture() -> Generator[AsyncMock]: - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options" - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon") -def install_addon_fixture() -> Generator[AsyncMock]: - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon" - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture() -> Generator[AsyncMock]: - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon - - 
-@pytest.fixture(name="start_addon") -def start_addon_fixture() -> Generator[AsyncMock]: - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon - - -@pytest.fixture(name="restart_addon") -def restart_addon_fixture() -> Generator[AsyncMock]: - """Mock restart add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_restart_addon" - ) as restart_addon: - yield restart_addon - - -@pytest.fixture(name="stop_addon") -def stop_addon_fixture() -> Generator[AsyncMock]: - """Mock stop add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_stop_addon" - ) as stop_addon: - yield stop_addon - - -@pytest.fixture(name="create_backup") -def create_backup_fixture() -> Generator[AsyncMock]: - """Mock create backup.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_create_backup" - ) as create_backup: - yield create_backup - - -@pytest.fixture(name="update_addon") -def mock_update_addon() -> Generator[AsyncMock]: - """Mock update add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_update_addon" - ) as update_addon: - yield update_addon - async def test_not_installed_raises_exception( addon_manager: AddonManager, @@ -888,9 +738,10 @@ async def test_create_backup_error( ) +@pytest.mark.usefixtures("addon_installed") +@pytest.mark.parametrize("set_addon_options_side_effect", [None]) async def test_schedule_install_setup_addon( addon_manager: AddonManager, - addon_installed: AsyncMock, install_addon: AsyncMock, set_addon_options: AsyncMock, start_addon: AsyncMock, @@ -1065,11 +916,10 @@ async def test_schedule_install_setup_addon_logs_error( assert start_addon.call_count == start_addon_calls +@pytest.mark.usefixtures("addon_installed") +@pytest.mark.parametrize("set_addon_options_side_effect", [None]) async def test_schedule_setup_addon( - addon_manager: AddonManager, - addon_installed: AsyncMock, - set_addon_options: AsyncMock, - start_addon: AsyncMock, + addon_manager: AddonManager, set_addon_options: AsyncMock, start_addon: AsyncMock ) -> None: """Test schedule setup addon.""" start_task = addon_manager.async_schedule_setup_addon({"test_key": "test"}) diff --git a/tests/components/hddtemp/test_sensor.py b/tests/components/hddtemp/test_sensor.py index 2bd0519c12c..15740ffa0ea 100644 --- a/tests/components/hddtemp/test_sensor.py +++ b/tests/components/hddtemp/test_sensor.py @@ -60,7 +60,7 @@ REFERENCE = { class TelnetMock: """Mock class for the telnetlib.Telnet object.""" - def __init__(self, host, port, timeout=0): + def __init__(self, host, port, timeout=0) -> None: """Initialize Telnet object.""" self.host = host self.port = port diff --git a/tests/components/hdmi_cec/__init__.py b/tests/components/hdmi_cec/__init__.py index 5cf8ed18b6a..1d51fa0cc50 100644 --- a/tests/components/hdmi_cec/__init__.py +++ b/tests/components/hdmi_cec/__init__.py @@ -8,7 +8,7 @@ from homeassistant.components.hdmi_cec import KeyPressCommand, KeyReleaseCommand class MockHDMIDevice: """Mock of a HDMIDevice.""" - def __init__(self, *, logical_address, **values): + def __init__(self, *, logical_address, **values) -> None: """Mock of a HDMIDevice.""" self.set_update_callback = Mock(side_effect=self._set_update_callback) self.logical_address = logical_address diff --git a/tests/components/heos/test_media_player.py b/tests/components/heos/test_media_player.py index 19f7ec74daf..089fa1cceea 100644 --- a/tests/components/heos/test_media_player.py 
+++ b/tests/components/heos/test_media_player.py @@ -1,6 +1,7 @@ """Tests for the Heos Media Player platform.""" import asyncio +from typing import Any from pyheos import CommandFailedError, const from pyheos.error import HeosError @@ -58,8 +59,12 @@ from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.dispatcher import async_dispatcher_connect from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry -async def setup_platform(hass, config_entry, config): + +async def setup_platform( + hass: HomeAssistant, config_entry: MockConfigEntry, config: dict[str, Any] +) -> None: """Set up the media player platform for testing.""" config_entry.add_to_hass(hass) assert await async_setup_component(hass, DOMAIN, config) diff --git a/tests/components/heos/test_services.py b/tests/components/heos/test_services.py index 2d812eb83ab..d8b8b5038b0 100644 --- a/tests/components/heos/test_services.py +++ b/tests/components/heos/test_services.py @@ -13,8 +13,10 @@ from homeassistant.components.heos.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from tests.common import MockConfigEntry -async def setup_component(hass, config_entry): + +async def setup_component(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Set up the component for testing.""" config_entry.add_to_hass(hass) assert await async_setup_component(hass, DOMAIN, {}) diff --git a/tests/components/history/test_init.py b/tests/components/history/test_init.py index 7806b7c9ef4..3b4b02a877e 100644 --- a/tests/components/history/test_init.py +++ b/tests/components/history/test_init.py @@ -1,6 +1,6 @@ """The tests the History component.""" -from datetime import timedelta +from datetime import datetime, timedelta from http import HTTPStatus import json from unittest.mock import sentinel @@ -13,7 +13,7 @@ from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.recorder.models import process_timestamp from homeassistant.const import EVENT_HOMEASSISTANT_FINAL_WRITE -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -291,13 +291,9 @@ async def test_get_significant_states_only(hass: HomeAssistant, hass_history) -> ) -async def check_significant_states(hass, zero, four, states, config): - """Check if significant states are retrieved.""" - hist = get_significant_states(hass, zero, four) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def async_record_states(hass): +async def async_record_states( + hass: HomeAssistant, +) -> tuple[datetime, datetime, dict[str, list[State | None]]]: """Record some test states. 
We inject a bunch of state updates from media player, zone and diff --git a/tests/components/history/test_init_db_schema_30.py b/tests/components/history/test_init_db_schema_30.py index bec074362ca..1520d5363d5 100644 --- a/tests/components/history/test_init_db_schema_30.py +++ b/tests/components/history/test_init_db_schema_30.py @@ -2,7 +2,7 @@ from __future__ import annotations -from datetime import timedelta +from datetime import datetime, timedelta from http import HTTPStatus import json from unittest.mock import patch, sentinel @@ -14,7 +14,7 @@ from homeassistant.components import recorder from homeassistant.components.recorder import Recorder from homeassistant.components.recorder.history import get_significant_states from homeassistant.components.recorder.models import process_timestamp -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -309,13 +309,9 @@ async def test_get_significant_states_only( ) -def check_significant_states(hass, zero, four, states, config): - """Check if significant states are retrieved.""" - hist = get_significant_states(hass, zero, four) - assert_dict_of_states_equal_without_context_and_last_changed(states, hist) - - -async def async_record_states(hass): +async def async_record_states( + hass: HomeAssistant, +) -> tuple[datetime, datetime, dict[str, list[State | None]]]: """Record some test states. We inject a bunch of state updates from media player, zone and diff --git a/tests/components/hive/test_config_flow.py b/tests/components/hive/test_config_flow.py index fd6eb564a39..e5dba49dcc1 100644 --- a/tests/components/hive/test_config_flow.py +++ b/tests/components/hive/test_config_flow.py @@ -246,14 +246,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: "homeassistant.components.hive.config_flow.Auth.login", side_effect=hive_exceptions.HiveInvalidPassword(), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config.unique_id, - }, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_password"} @@ -305,14 +298,7 @@ async def test_reauth_2fa_flow(hass: HomeAssistant) -> None: "homeassistant.components.hive.config_flow.Auth.login", side_effect=hive_exceptions.HiveInvalidPassword(), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config.unique_id, - }, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_password"} diff --git a/tests/components/hlk_sw16/test_config_flow.py b/tests/components/hlk_sw16/test_config_flow.py index 6a758ec5066..2225ea1b79a 100644 --- a/tests/components/hlk_sw16/test_config_flow.py +++ b/tests/components/hlk_sw16/test_config_flow.py @@ -12,7 +12,7 @@ from homeassistant.data_entry_flow import FlowResultType class MockSW16Client: """Class to mock the SW16Client client.""" - def __init__(self, fail): + def __init__(self, fail) -> None: """Initialise client with failure modes.""" self.fail = fail self.disconnect_callback = None diff --git a/tests/components/homeassistant/snapshots/test_exposed_entities.ambr 
b/tests/components/homeassistant/snapshots/test_exposed_entities.ambr index 55b95186b49..9c93655cd4e 100644 --- a/tests/components/homeassistant/snapshots/test_exposed_entities.ambr +++ b/tests/components/homeassistant/snapshots/test_exposed_entities.ambr @@ -13,13 +13,3 @@ dict({ }) # --- -# name: test_listeners - dict({ - 'light.kitchen': dict({ - 'should_expose': True, - }), - 'switch.test_unique1': mappingproxy({ - 'should_expose': True, - }), - }) -# --- diff --git a/tests/components/homeassistant/test_init.py b/tests/components/homeassistant/test_init.py index d090da280a0..a0902fe62df 100644 --- a/tests/components/homeassistant/test_init.py +++ b/tests/components/homeassistant/test_init.py @@ -1,7 +1,5 @@ """The tests for Core components.""" -import asyncio -import unittest from unittest.mock import Mock, patch import pytest @@ -44,206 +42,143 @@ from tests.common import ( MockUser, async_capture_events, async_mock_service, - get_test_home_assistant, - mock_service, patch_yaml_files, ) -def turn_on(hass, entity_id=None, **service_data): - """Turn specified entity on if possible. - - This is a legacy helper method. Do not use it for new tests. - """ - if entity_id is not None: - service_data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(ha.DOMAIN, SERVICE_TURN_ON, service_data) +async def test_is_on(hass: HomeAssistant) -> None: + """Test is_on method.""" + with pytest.raises( + RuntimeError, + match="Detected code that uses homeassistant.components.is_on. This is deprecated and will stop working", + ): + assert comps.is_on(hass, "light.Bowl") -def turn_off(hass, entity_id=None, **service_data): - """Turn specified entity off. - - This is a legacy helper method. Do not use it for new tests. - """ - if entity_id is not None: - service_data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(ha.DOMAIN, SERVICE_TURN_OFF, service_data) +async def test_turn_on_without_entities(hass: HomeAssistant) -> None: + """Test turn_on method without entities.""" + await async_setup_component(hass, ha.DOMAIN, {}) + calls = async_mock_service(hass, "light", SERVICE_TURN_ON) + await hass.services.async_call(ha.DOMAIN, SERVICE_TURN_ON, blocking=True) + assert len(calls) == 0 -def toggle(hass, entity_id=None, **service_data): - """Toggle specified entity. - - This is a legacy helper method. Do not use it for new tests. - """ - if entity_id is not None: - service_data[ATTR_ENTITY_ID] = entity_id - - hass.services.call(ha.DOMAIN, SERVICE_TOGGLE, service_data) - - -def stop(hass): - """Stop Home Assistant. - - This is a legacy helper method. Do not use it for new tests. - """ - hass.services.call(ha.DOMAIN, SERVICE_HOMEASSISTANT_STOP) - - -def restart(hass): - """Stop Home Assistant. - - This is a legacy helper method. Do not use it for new tests. - """ - hass.services.call(ha.DOMAIN, SERVICE_HOMEASSISTANT_RESTART) - - -def check_config(hass): - """Check the config files. - - This is a legacy helper method. Do not use it for new tests. - """ - hass.services.call(ha.DOMAIN, SERVICE_CHECK_CONFIG) - - -def reload_core_config(hass): - """Reload the core config. - - This is a legacy helper method. Do not use it for new tests. 
- """ - hass.services.call(ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG) - - -class TestComponentsCore(unittest.TestCase): - """Test homeassistant.components module.""" - - def setUp(self): - """Set up things to be run when tests are started.""" - self._manager = get_test_home_assistant() - self.hass = self._manager.__enter__() - assert asyncio.run_coroutine_threadsafe( - async_setup_component(self.hass, "homeassistant", {}), self.hass.loop - ).result() - - self.hass.states.set("light.Bowl", STATE_ON) - self.hass.states.set("light.Ceiling", STATE_OFF) - - def tearDown(self) -> None: - """Tear down hass object.""" - self.hass.stop() - self._manager.__exit__(None, None, None) - - def test_is_on(self): - """Test is_on method.""" - with pytest.raises( - RuntimeError, - match="Detected code that uses homeassistant.components.is_on. This is deprecated and will stop working", - ): - assert comps.is_on(self.hass, "light.Bowl") - - def test_turn_on_without_entities(self): - """Test turn_on method without entities.""" - calls = mock_service(self.hass, "light", SERVICE_TURN_ON) - turn_on(self.hass) - self.hass.block_till_done() - assert len(calls) == 0 - - def test_turn_on(self): - """Test turn_on method.""" - calls = mock_service(self.hass, "light", SERVICE_TURN_ON) - turn_on(self.hass, "light.Ceiling") - self.hass.block_till_done() - assert len(calls) == 1 - - def test_turn_off(self): - """Test turn_off method.""" - calls = mock_service(self.hass, "light", SERVICE_TURN_OFF) - turn_off(self.hass, "light.Bowl") - self.hass.block_till_done() - assert len(calls) == 1 - - def test_toggle(self): - """Test toggle method.""" - calls = mock_service(self.hass, "light", SERVICE_TOGGLE) - toggle(self.hass, "light.Bowl") - self.hass.block_till_done() - assert len(calls) == 1 - - @patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) - def test_reload_core_conf(self): - """Test reload core conf service.""" - ent = entity.Entity() - ent.entity_id = "test.entity" - ent.hass = self.hass - ent.schedule_update_ha_state() - self.hass.block_till_done() - - state = self.hass.states.get("test.entity") - assert state is not None - assert state.state == "unknown" - assert state.attributes == {} - - files = { - config.YAML_CONFIG_FILE: yaml.dump( - { - ha.DOMAIN: { - "country": "SE", # To avoid creating issue country_not_configured - "latitude": 10, - "longitude": 20, - "customize": {"test.Entity": {"hello": "world"}}, - } - } - ) - } - with patch_yaml_files(files, True): - reload_core_config(self.hass) - self.hass.block_till_done() - - assert self.hass.config.latitude == 10 - assert self.hass.config.longitude == 20 - - ent.schedule_update_ha_state() - self.hass.block_till_done() - - state = self.hass.states.get("test.entity") - assert state is not None - assert state.state == "unknown" - assert state.attributes.get("hello") == "world" - - @patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) - @patch("homeassistant.components.homeassistant._LOGGER.error") - @patch("homeassistant.config.async_process_ha_core_config") - def test_reload_core_with_wrong_conf(self, mock_process, mock_error): - """Test reload core conf service.""" - files = {config.YAML_CONFIG_FILE: yaml.dump(["invalid", "config"])} - with patch_yaml_files(files, True): - reload_core_config(self.hass) - self.hass.block_till_done() - - assert mock_error.called - assert mock_process.called is False - - @patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) - @patch( - "homeassistant.config.async_check_ha_config_file", - 
side_effect=HomeAssistantError("Test error"), +async def test_turn_on(hass: HomeAssistant) -> None: + """Test turn_on method.""" + await async_setup_component(hass, ha.DOMAIN, {}) + calls = async_mock_service(hass, "light", SERVICE_TURN_ON) + await hass.services.async_call( + ha.DOMAIN, SERVICE_TURN_ON, {ATTR_ENTITY_ID: "light.Ceiling"}, blocking=True ) - def test_restart_homeassistant_wrong_conf(self, mock_check, mock_restart): - """Test stop service.""" - restart(self.hass) - self.hass.block_till_done() - assert mock_check.called - assert not mock_restart.called + assert len(calls) == 1 - @patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) - @patch("homeassistant.config.async_check_ha_config_file", return_value=None) - def test_check_config(self, mock_check, mock_stop): - """Test stop service.""" - check_config(self.hass) - self.hass.block_till_done() - assert mock_check.called - assert not mock_stop.called + +async def test_turn_off(hass: HomeAssistant) -> None: + """Test turn_off method.""" + await async_setup_component(hass, ha.DOMAIN, {}) + calls = async_mock_service(hass, "light", SERVICE_TURN_OFF) + await hass.services.async_call( + ha.DOMAIN, SERVICE_TURN_OFF, {ATTR_ENTITY_ID: "light.Bowl"}, blocking=True + ) + assert len(calls) == 1 + + +async def test_toggle(hass: HomeAssistant) -> None: + """Test toggle method.""" + await async_setup_component(hass, ha.DOMAIN, {}) + calls = async_mock_service(hass, "light", SERVICE_TOGGLE) + await hass.services.async_call( + ha.DOMAIN, SERVICE_TOGGLE, {ATTR_ENTITY_ID: "light.Bowl"}, blocking=True + ) + assert len(calls) == 1 + + +@patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) +async def test_reload_core_conf(hass: HomeAssistant) -> None: + """Test reload core conf service.""" + await async_setup_component(hass, ha.DOMAIN, {}) + ent = entity.Entity() + ent.entity_id = "test.entity" + ent.hass = hass + ent.async_write_ha_state() + + state = hass.states.get("test.entity") + assert state is not None + assert state.state == "unknown" + assert state.attributes == {} + + files = { + config.YAML_CONFIG_FILE: yaml.dump( + { + ha.DOMAIN: { + "country": "SE", # To avoid creating issue country_not_configured + "latitude": 10, + "longitude": 20, + "customize": {"test.Entity": {"hello": "world"}}, + } + } + ) + } + with patch_yaml_files(files, True): + await hass.services.async_call( + ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG, blocking=True + ) + + assert hass.config.latitude == 10 + assert hass.config.longitude == 20 + + ent.async_write_ha_state() + + state = hass.states.get("test.entity") + assert state is not None + assert state.state == "unknown" + assert state.attributes.get("hello") == "world" + + +@patch("homeassistant.config.os.path.isfile", Mock(return_value=True)) +@patch("homeassistant.components.homeassistant._LOGGER.error") +@patch("homeassistant.config.async_process_ha_core_config") +async def test_reload_core_with_wrong_conf( + mock_process, mock_error, hass: HomeAssistant +) -> None: + """Test reload core conf service.""" + files = {config.YAML_CONFIG_FILE: yaml.dump(["invalid", "config"])} + await async_setup_component(hass, ha.DOMAIN, {}) + with patch_yaml_files(files, True): + await hass.services.async_call( + ha.DOMAIN, SERVICE_RELOAD_CORE_CONFIG, blocking=True + ) + + assert mock_error.called + assert mock_process.called is False + + +@patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) +@patch( + "homeassistant.config.async_check_ha_config_file", + 
side_effect=HomeAssistantError("Test error"), +) +async def test_restart_homeassistant_wrong_conf( + mock_check, mock_restart, hass: HomeAssistant +) -> None: + """Test restart service with error.""" + await async_setup_component(hass, ha.DOMAIN, {}) + with pytest.raises(HomeAssistantError, match="Test error"): + await hass.services.async_call( + ha.DOMAIN, SERVICE_HOMEASSISTANT_RESTART, blocking=True + ) + assert mock_check.called + assert not mock_restart.called + + +@patch("homeassistant.core.HomeAssistant.async_stop", return_value=None) +@patch("homeassistant.config.async_check_ha_config_file", return_value=None) +async def test_check_config(mock_check, mock_stop, hass: HomeAssistant) -> None: + """Test stop service.""" + await async_setup_component(hass, ha.DOMAIN, {}) + await hass.services.async_call(ha.DOMAIN, SERVICE_CHECK_CONFIG, blocking=True) + assert mock_check.called + assert not mock_stop.called async def test_turn_on_skips_domains_without_service( diff --git a/tests/components/homeassistant_hardware/conftest.py b/tests/components/homeassistant_hardware/conftest.py index b62ccaf855b..c63dca74391 100644 --- a/tests/components/homeassistant_hardware/conftest.py +++ b/tests/components/homeassistant_hardware/conftest.py @@ -49,112 +49,6 @@ def mock_zha_get_last_network_settings() -> Generator[None]: yield -@pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info, addon_info): - """Mock add-on already running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed(addon_store_info, addon_info): - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture(): - """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": True, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture(): - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": True, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="set_addon_options") -def set_addon_options_fixture(): - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options" - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture(addon_store_info, addon_info): - """Return the install add-on side effect.""" - - async def install_addon(hass, slug): - """Mock install add-on.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": 
"1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - return install_addon - - -@pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect): - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="start_addon") -def start_addon_fixture(): - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon - - @pytest.fixture(name="stop_addon") def stop_addon_fixture(): """Mock stop add-on.""" @@ -162,12 +56,3 @@ def stop_addon_fixture(): "homeassistant.components.hassio.addon_manager.async_stop_addon" ) as stop_addon: yield stop_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture(): - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon diff --git a/tests/components/homeassistant_sky_connect/conftest.py b/tests/components/homeassistant_sky_connect/conftest.py index 69b0901aadf..d71bf4305b3 100644 --- a/tests/components/homeassistant_sky_connect/conftest.py +++ b/tests/components/homeassistant_sky_connect/conftest.py @@ -49,112 +49,6 @@ def mock_zha_get_last_network_settings() -> Generator[None]: yield -@pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info, addon_info): - """Mock add-on already running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed(addon_store_info, addon_info): - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture(): - """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": True, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture(): - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": True, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="set_addon_options") -def set_addon_options_fixture(): - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options" - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture(addon_store_info, 
addon_info): - """Return the install add-on side effect.""" - - async def install_addon(hass, slug): - """Mock install add-on.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - return install_addon - - -@pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect): - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="start_addon") -def start_addon_fixture(): - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon - - @pytest.fixture(name="stop_addon") def stop_addon_fixture(): """Mock stop add-on.""" @@ -162,12 +56,3 @@ def stop_addon_fixture(): "homeassistant.components.hassio.addon_manager.async_stop_addon" ) as stop_addon: yield stop_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture(): - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon diff --git a/tests/components/homeassistant_yellow/conftest.py b/tests/components/homeassistant_yellow/conftest.py index 0077fb27058..7247c7da4e2 100644 --- a/tests/components/homeassistant_yellow/conftest.py +++ b/tests/components/homeassistant_yellow/conftest.py @@ -47,109 +47,3 @@ def mock_zha_get_last_network_settings() -> Generator[None]: AsyncMock(return_value=None), ): yield - - -@pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info, addon_info): - """Mock add-on already running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed(addon_store_info, addon_info): - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture(): - """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": True, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture(): - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": True, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="set_addon_options") -def set_addon_options_fixture(): - """Mock set add-on options.""" - 
with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options" - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture(addon_store_info, addon_info): - """Return the install add-on side effect.""" - - async def install_addon(hass, slug): - """Mock install add-on.""" - addon_store_info.return_value = { - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["hostname"] = "core-silabs-multiprotocol" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - return install_addon - - -@pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect): - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="start_addon") -def start_addon_fixture(): - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon diff --git a/tests/components/homeassistant_yellow/test_config_flow.py b/tests/components/homeassistant_yellow/test_config_flow.py index 95d7df89c9d..949e58e61b6 100644 --- a/tests/components/homeassistant_yellow/test_config_flow.py +++ b/tests/components/homeassistant_yellow/test_config_flow.py @@ -6,8 +6,17 @@ from unittest.mock import Mock, patch import pytest from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN -from homeassistant.components.homeassistant_yellow.const import DOMAIN -from homeassistant.components.zha import DOMAIN as ZHA_DOMAIN +from homeassistant.components.hassio.addon_manager import AddonInfo, AddonState +from homeassistant.components.homeassistant_hardware.firmware_config_flow import ( + STEP_PICK_FIRMWARE_ZIGBEE, +) +from homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon import ( + CONF_DISABLE_MULTI_PAN, + get_flasher_addon_manager, + get_multiprotocol_addon_manager, +) +from homeassistant.components.homeassistant_hardware.util import ApplicationType +from homeassistant.components.homeassistant_yellow.const import DOMAIN, RADIO_DEVICE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -57,22 +66,28 @@ async def test_config_flow(hass: HomeAssistant) -> None: mock_integration(hass, MockModule("hassio")) await async_setup_component(hass, HASSIO_DOMAIN, {}) - with patch( - "homeassistant.components.homeassistant_yellow.async_setup_entry", - return_value=True, - ) as mock_setup_entry: + with ( + patch( + "homeassistant.components.homeassistant_yellow.async_setup_entry", + return_value=True, + ) as mock_setup_entry, + patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.probe_silabs_firmware_type", + return_value=ApplicationType.EZSP, + ), + ): result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": "system"} ) assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "Home Assistant Yellow" - assert result["data"] == {} + assert result["data"] == {"firmware": "ezsp"} assert result["options"] == {} assert len(mock_setup_entry.mock_calls) == 1 config_entry = hass.config_entries.async_entries(DOMAIN)[0] - assert config_entry.data == {} + assert config_entry.data == {"firmware": "ezsp"} assert 
config_entry.options == {} assert config_entry.title == "Home Assistant Yellow" @@ -84,10 +99,12 @@ async def test_config_flow_single_entry(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -104,165 +121,6 @@ async def test_config_flow_single_entry(hass: HomeAssistant) -> None: mock_setup_entry.assert_not_called() -async def test_option_flow_install_multi_pan_addon( - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, -) -> None: - """Test installing the multi pan addon.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - - with patch( - "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - {"next_step_id": "multipan_settings"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "addon_not_installed" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "enable_multi_pan": True, - }, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - "autoflash_firmware": True, - "device": "/dev/ttyAMA1", - "baudrate": "115200", - "flow_control": True, - } - }, - ) - - await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - - -async def test_option_flow_install_multi_pan_addon_zha( - hass: HomeAssistant, - addon_store_info, - addon_info, - install_addon, - set_addon_options, - start_addon, -) -> None: - """Test installing the multi pan addon when a zha config entry exists.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - - zha_config_entry = MockConfigEntry( - data={"device": {"path": "/dev/ttyAMA1"}, "radio_type": "ezsp"}, - domain=ZHA_DOMAIN, - options={}, - title="Yellow", - ) - zha_config_entry.add_to_hass(hass) - - result = await hass.config_entries.options.async_init(config_entry.entry_id) - assert result["type"] is FlowResultType.MENU - - with patch( - 
"homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", - side_effect=Mock(return_value=True), - ): - result = await hass.config_entries.options.async_configure( - result["flow_id"], - {"next_step_id": "multipan_settings"}, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "addon_not_installed" - - result = await hass.config_entries.options.async_configure( - result["flow_id"], - user_input={ - "enable_multi_pan": True, - }, - ) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "install_addon" - assert result["progress_action"] == "install_addon" - - await hass.async_block_till_done() - install_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - set_addon_options.assert_called_once_with( - hass, - "core_silabs_multiprotocol", - { - "options": { - "autoflash_firmware": True, - "device": "/dev/ttyAMA1", - "baudrate": "115200", - "flow_control": True, - } - }, - ) - # Check the ZHA config entry data is updated - assert zha_config_entry.data == { - "device": { - "path": "socket://core-silabs-multiprotocol:9999", - "baudrate": 115200, - "flow_control": None, - }, - "radio_type": "ezsp", - } - - await hass.async_block_till_done() - start_addon.assert_called_once_with(hass, "core_silabs_multiprotocol") - - result = await hass.config_entries.options.async_configure(result["flow_id"]) - assert result["type"] is FlowResultType.CREATE_ENTRY - - @pytest.mark.parametrize( ("reboot_menu_choice", "reboot_calls"), [("reboot_now", 1), ("reboot_later", 0)], @@ -281,10 +139,12 @@ async def test_option_flow_led_settings( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -327,10 +187,12 @@ async def test_option_flow_led_settings_unchanged( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -359,10 +221,12 @@ async def test_option_flow_led_settings_fail_1(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -391,10 +255,12 @@ async def test_option_flow_led_settings_fail_2( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) @@ -418,3 +284,139 @@ async def test_option_flow_led_settings_fail_2( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "write_hw_settings_error" + + +async def test_firmware_options_flow(hass: HomeAssistant) -> None: + """Test the firmware options flow for Yellow.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + config_entry = MockConfigEntry( + data={"firmware": ApplicationType.SPINEL}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + version=1, 
+ minor_version=2, + ) + config_entry.add_to_hass(hass) + + # First step is confirmation + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "main_menu" + assert "firmware_settings" in result["menu_options"] + + # Pick firmware settings + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": "firmware_settings"}, + ) + + assert result["step_id"] == "pick_firmware" + assert result["description_placeholders"]["firmware_type"] == "spinel" + assert result["description_placeholders"]["model"] == "Home Assistant Yellow" + + async def mock_async_step_pick_firmware_zigbee(self, data): + return await self.async_step_confirm_zigbee(user_input={}) + + with patch( + "homeassistant.components.homeassistant_hardware.firmware_config_flow.BaseFirmwareOptionsFlow.async_step_pick_firmware_zigbee", + autospec=True, + side_effect=mock_async_step_pick_firmware_zigbee, + ): + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": STEP_PICK_FIRMWARE_ZIGBEE}, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"] is True + + assert config_entry.data == { + "firmware": "ezsp", + } + + +async def test_options_flow_multipan_uninstall(hass: HomeAssistant) -> None: + """Test options flow for when multi-PAN firmware is installed.""" + mock_integration(hass, MockModule("hassio")) + await async_setup_component(hass, HASSIO_DOMAIN, {}) + + config_entry = MockConfigEntry( + data={"firmware": ApplicationType.CPC}, + domain=DOMAIN, + options={}, + title="Home Assistant Yellow", + version=1, + minor_version=2, + ) + config_entry.add_to_hass(hass) + + # Multi-PAN addon is running + mock_multipan_manager = Mock(spec_set=await get_multiprotocol_addon_manager(hass)) + mock_multipan_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={"device": RADIO_DEVICE}, + state=AddonState.RUNNING, + update_available=False, + version="1.0.0", + ) + + mock_flasher_manager = Mock(spec_set=get_flasher_addon_manager(hass)) + mock_flasher_manager.async_get_addon_info.return_value = AddonInfo( + available=True, + hostname=None, + options={}, + state=AddonState.NOT_RUNNING, + update_available=False, + version="1.0.0", + ) + + with ( + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_multiprotocol_addon_manager", + return_value=mock_multipan_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.get_flasher_addon_manager", + return_value=mock_flasher_manager, + ), + patch( + "homeassistant.components.homeassistant_hardware.silabs_multiprotocol_addon.is_hassio", + return_value=True, + ), + ): + result = await hass.config_entries.options.async_init(config_entry.entry_id) + assert result["type"] is FlowResultType.MENU + assert result["step_id"] == "main_menu" + assert "multipan_settings" in result["menu_options"] + + # Pick multi-PAN settings + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": "multipan_settings"}, + ) + + # Pick the uninstall option + result = await hass.config_entries.options.async_configure( + result["flow_id"], + user_input={"next_step_id": "uninstall_addon"}, + ) + + # Check the box + result = await hass.config_entries.options.async_configure( + result["flow_id"], 
user_input={CONF_DISABLE_MULTI_PAN: True} + ) + + # Finish the flow + result = await hass.config_entries.options.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.options.async_configure(result["flow_id"]) + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.options.async_configure(result["flow_id"]) + assert result["type"] is FlowResultType.CREATE_ENTRY + + # We've reverted the firmware back to Zigbee + assert config_entry.data["firmware"] == "ezsp" diff --git a/tests/components/homeassistant_yellow/test_init.py b/tests/components/homeassistant_yellow/test_init.py index ec3ba4e7005..5d534dad1e7 100644 --- a/tests/components/homeassistant_yellow/test_init.py +++ b/tests/components/homeassistant_yellow/test_init.py @@ -6,10 +6,14 @@ import pytest from homeassistant.components import zha from homeassistant.components.hassio import DOMAIN as HASSIO_DOMAIN -from homeassistant.components.hassio.handler import HassioAPIError +from homeassistant.components.homeassistant_hardware.util import ( + ApplicationType, + FirmwareGuess, +) from homeassistant.components.homeassistant_yellow.const import DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, MockModule, mock_integration @@ -27,10 +31,12 @@ async def test_setup_entry( # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -42,6 +48,14 @@ async def test_setup_entry( "homeassistant.components.onboarding.async_is_onboarded", return_value=onboarded, ), + patch( + "homeassistant.components.homeassistant_yellow.guess_firmware_type", + return_value=FirmwareGuess( # Nothing is setup + is_running=False, + firmware_type=ApplicationType.EZSP, + source="unknown", + ), + ), ): assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done(wait_background_tasks=True) @@ -74,118 +88,12 @@ async def test_setup_zha(hass: HomeAssistant, addon_store_info) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_yellow.get_os_info", - return_value={"board": "yellow"}, - ) as mock_get_os_info, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get_os_info.mock_calls) == 1 - - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": "hardware", - "path": "/dev/ttyAMA1", 
- }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == "Yellow" - - -async def test_setup_zha_multipan( - hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - addon_info.return_value["options"]["device"] = "/dev/ttyAMA1" - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_yellow.get_os_info", - return_value={"board": "yellow"}, - ) as mock_get_os_info, - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - assert await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done(wait_background_tasks=True) - assert len(mock_get_os_info.mock_calls) == 1 - - # Finish setting up ZHA - zha_flows = hass.config_entries.flow.async_progress_by_handler("zha") - assert len(zha_flows) == 1 - assert zha_flows[0]["step_id"] == "choose_formation_strategy" - - await hass.config_entries.flow.async_configure( - zha_flows[0]["flow_id"], - user_input={"next_step_id": zha.config_flow.FORMATION_REUSE_SETTINGS}, - ) - await hass.async_block_till_done() - - config_entry = hass.config_entries.async_entries("zha")[0] - assert config_entry.data == { - "device": { - "baudrate": 115200, - "flow_control": None, - "path": "socket://core-silabs-multiprotocol:9999", - }, - "radio_type": "ezsp", - } - assert config_entry.options == {} - assert config_entry.title == "Yellow Multiprotocol" - - -async def test_setup_zha_multipan_other_device( - hass: HomeAssistant, addon_info, addon_running -) -> None: - """Test zha gets the right config.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - addon_info.return_value["options"]["device"] = "/dev/not_yellow_radio" - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -229,10 +137,12 @@ async def test_setup_entry_no_hassio(hass: HomeAssistant) -> None: """Test setup of a config entry without hassio.""" # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) assert len(hass.config_entries.async_entries()) == 1 @@ -254,10 +164,12 @@ async def test_setup_entry_wrong_board(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) assert len(hass.config_entries.async_entries()) == 1 @@ -280,10 +192,12 @@ async def test_setup_entry_wait_hassio(hass: HomeAssistant) -> None: # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.EZSP}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with patch( @@ -303,14 +217,15 @@ async def test_setup_entry_addon_info_fails( """Test setup of a config entry when 
fetching addon info fails.""" mock_integration(hass, MockModule("hassio")) await async_setup_component(hass, HASSIO_DOMAIN, {}) - addon_store_info.side_effect = HassioAPIError("Boom") # Setup the config entry config_entry = MockConfigEntry( - data={}, + data={"firmware": ApplicationType.CPC}, domain=DOMAIN, options={}, title="Home Assistant Yellow", + version=1, + minor_version=2, ) config_entry.add_to_hass(hass) with ( @@ -319,41 +234,15 @@ async def test_setup_entry_addon_info_fails( return_value={"board": "yellow"}, ), patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False + "homeassistant.components.onboarding.async_is_onboarded", + return_value=False, + ), + patch( + "homeassistant.components.homeassistant_yellow.check_multi_pan_addon", + side_effect=HomeAssistantError("Boom"), ), ): assert not await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.SETUP_RETRY - - -async def test_setup_entry_addon_not_running( - hass: HomeAssistant, addon_installed, start_addon -) -> None: - """Test the addon is started if it is not running.""" - mock_integration(hass, MockModule("hassio")) - await async_setup_component(hass, HASSIO_DOMAIN, {}) - - # Setup the config entry - config_entry = MockConfigEntry( - data={}, - domain=DOMAIN, - options={}, - title="Home Assistant Yellow", - ) - config_entry.add_to_hass(hass) - with ( - patch( - "homeassistant.components.homeassistant_yellow.get_os_info", - return_value={"board": "yellow"}, - ), - patch( - "homeassistant.components.onboarding.async_is_onboarded", return_value=False - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.SETUP_RETRY - start_addon.assert_called_once() diff --git a/tests/components/homekit/test_homekit.py b/tests/components/homekit/test_homekit.py index 93458724c5e..ba8c1919e73 100644 --- a/tests/components/homekit/test_homekit.py +++ b/tests/components/homekit/test_homekit.py @@ -73,6 +73,7 @@ from homeassistant.helpers.entityfilter import ( CONF_INCLUDE_DOMAINS, CONF_INCLUDE_ENTITIES, CONF_INCLUDE_ENTITY_GLOBS, + EntityFilter, convert_filter, ) from homeassistant.setup import async_setup_component @@ -119,7 +120,13 @@ def patch_source_ip(): yield -def _mock_homekit(hass, entry, homekit_mode, entity_filter=None, devices=None): +def _mock_homekit( + hass: HomeAssistant, + entry: MockConfigEntry, + homekit_mode: str, + entity_filter: EntityFilter | None = None, + devices: list[str] | None = None, +) -> HomeKit: return HomeKit( hass=hass, name=BRIDGE_NAME, @@ -136,7 +143,7 @@ def _mock_homekit(hass, entry, homekit_mode, entity_filter=None, devices=None): ) -def _mock_homekit_bridge(hass, entry): +def _mock_homekit_bridge(hass: HomeAssistant, entry: MockConfigEntry) -> HomeKit: homekit = _mock_homekit(hass, entry, HOMEKIT_MODE_BRIDGE) homekit.driver = MagicMock() homekit.iid_storage = MagicMock() diff --git a/tests/components/homekit/test_type_cameras.py b/tests/components/homekit/test_type_cameras.py index a32656e9f2b..a42980ec2af 100644 --- a/tests/components/homekit/test_type_cameras.py +++ b/tests/components/homekit/test_type_cameras.py @@ -1,6 +1,7 @@ """Test different accessory types: Camera.""" import asyncio +from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from uuid import UUID @@ -53,12 +54,12 @@ PID_THAT_WILL_NEVER_BE_ALIVE = 2147483647 
@pytest.fixture(autouse=True) -async def setup_homeassistant(hass: HomeAssistant): +async def setup_homeassistant(hass: HomeAssistant) -> None: """Set up the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) -async def _async_start_streaming(hass, acc): +async def _async_start_streaming(hass: HomeAssistant, acc: Camera) -> None: """Start streaming a camera.""" acc.set_selected_stream_configuration(MOCK_START_STREAM_TLV) await hass.async_block_till_done() @@ -66,28 +67,35 @@ async def _async_start_streaming(hass, acc): await hass.async_block_till_done() -async def _async_setup_endpoints(hass, acc): +async def _async_setup_endpoints(hass: HomeAssistant, acc: Camera) -> None: """Set camera endpoints.""" acc.set_endpoints(MOCK_END_POINTS_TLV) acc.run() await hass.async_block_till_done() -async def _async_reconfigure_stream(hass, acc, session_info, stream_config): +async def _async_reconfigure_stream( + hass: HomeAssistant, + acc: Camera, + session_info: dict[str, Any], + stream_config: dict[str, Any], +) -> None: """Reconfigure the stream.""" await acc.reconfigure_stream(session_info, stream_config) acc.run() await hass.async_block_till_done() -async def _async_stop_all_streams(hass, acc): +async def _async_stop_all_streams(hass: HomeAssistant, acc: Camera) -> None: """Stop all camera streams.""" await acc.stop() acc.run() await hass.async_block_till_done() -async def _async_stop_stream(hass, acc, session_info): +async def _async_stop_stream( + hass: HomeAssistant, acc: Camera, session_info: dict[str, Any] +) -> None: """Stop a camera stream.""" await acc.stop_stream(session_info) acc.run() diff --git a/tests/components/homekit/test_type_lights.py b/tests/components/homekit/test_type_lights.py index 02532a91e6d..0f85e07c0bb 100644 --- a/tests/components/homekit/test_type_lights.py +++ b/tests/components/homekit/test_type_lights.py @@ -46,7 +46,7 @@ import homeassistant.util.dt as dt_util from tests.common import async_fire_time_changed, async_mock_service -async def _wait_for_light_coalesce(hass): +async def _wait_for_light_coalesce(hass: HomeAssistant) -> None: async_fire_time_changed( hass, dt_util.utcnow() + timedelta(seconds=CHANGE_COALESCE_TIME_WINDOW) ) diff --git a/tests/components/homekit/test_type_thermostats.py b/tests/components/homekit/test_type_thermostats.py index 3a32e94e491..8454610566b 100644 --- a/tests/components/homekit/test_type_thermostats.py +++ b/tests/components/homekit/test_type_thermostats.py @@ -161,6 +161,40 @@ async def test_thermostat(hass: HomeAssistant, hk_driver, events: list[Event]) - assert acc.char_current_temp.value == 23.0 assert acc.char_display_units.value == 0 + hass.states.async_set( + entity_id, + HVACMode.HEAT, + { + **base_attrs, + ATTR_TEMPERATURE: 22.2, + ATTR_CURRENT_TEMPERATURE: 17.8, + ATTR_HVAC_ACTION: HVACAction.PREHEATING, + }, + ) + await hass.async_block_till_done() + assert acc.char_target_temp.value == 22.2 + assert acc.char_current_heat_cool.value == 1 + assert acc.char_target_heat_cool.value == 1 + assert acc.char_current_temp.value == 17.8 + assert acc.char_display_units.value == 0 + + hass.states.async_set( + entity_id, + HVACMode.HEAT, + { + **base_attrs, + ATTR_TEMPERATURE: 22.2, + ATTR_CURRENT_TEMPERATURE: 17.8, + ATTR_HVAC_ACTION: HVACAction.DEFROSTING, + }, + ) + await hass.async_block_till_done() + assert acc.char_target_temp.value == 22.2 + assert acc.char_current_heat_cool.value == 1 + assert acc.char_target_heat_cool.value == 1 + assert acc.char_current_temp.value == 17.8 + assert 
acc.char_display_units.value == 0 + hass.states.async_set( entity_id, HVACMode.FAN_ONLY, diff --git a/tests/components/homekit_controller/common.py b/tests/components/homekit_controller/common.py index 9aba3ef3225..b94a267104b 100644 --- a/tests/components/homekit_controller/common.py +++ b/tests/components/homekit_controller/common.py @@ -2,6 +2,7 @@ from __future__ import annotations +from collections.abc import Callable from dataclasses import dataclass from datetime import timedelta import logging @@ -12,6 +13,7 @@ from unittest import mock from aiohomekit.controller.abstract import AbstractDescription, AbstractPairing from aiohomekit.hkjson import loads as hkloads from aiohomekit.model import Accessories, AccessoriesState, Accessory +from aiohomekit.model.services import Service from aiohomekit.testing import FakeController, FakePairing from homeassistant.components.device_automation import DeviceAutomationType @@ -177,7 +179,7 @@ class Helper: return state -async def time_changed(hass, seconds): +async def time_changed(hass: HomeAssistant, seconds: int) -> None: """Trigger time changed.""" next_update = dt_util.utcnow() + timedelta(seconds) async_fire_time_changed(hass, next_update) @@ -193,7 +195,7 @@ async def setup_accessories_from_file(hass: HomeAssistant, path: str) -> Accesso return Accessories.from_list(accessories_json) -async def setup_platform(hass): +async def setup_platform(hass: HomeAssistant) -> FakeController: """Load the platform but with a fake Controller API.""" config = {"discovery": {}} @@ -205,7 +207,9 @@ async def setup_platform(hass): return await async_get_controller(hass) -async def setup_test_accessories(hass, accessories, connection=None): +async def setup_test_accessories( + hass: HomeAssistant, accessories: list[Accessory], connection: str | None = None +) -> tuple[MockConfigEntry, AbstractPairing]: """Load a fake homekit device based on captured JSON profile.""" fake_controller = await setup_platform(hass) return await setup_test_accessories_with_controller( @@ -214,8 +218,11 @@ async def setup_test_accessories(hass, accessories, connection=None): async def setup_test_accessories_with_controller( - hass, accessories, fake_controller, connection=None -): + hass: HomeAssistant, + accessories: list[Accessory], + fake_controller: FakeController, + connection: str | None = None, +) -> tuple[MockConfigEntry, AbstractPairing]: """Load a fake homekit device based on captured JSON profile.""" pairing_id = "00:00:00:00:00:00" @@ -277,8 +284,13 @@ async def device_config_changed(hass: HomeAssistant, accessories: Accessories): async def setup_test_component( - hass, aid, setup_accessory, capitalize=False, suffix=None, connection=None -): + hass: HomeAssistant, + aid: int, + setup_accessory: Callable[[Accessory], Service | None], + capitalize: bool = False, + suffix: str | None = None, + connection: str | None = None, +) -> Helper: """Load a fake homekit accessory based on a homekit accessory model. If capitalize is True, property names will be in upper case. 
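[Editor's aside, not part of the patch] The typed setup_test_component signature above makes explicit that setup_accessory is a callback that takes an aiohomekit Accessory and returns the Service under test (or None). A minimal sketch of how such a callback is typically written and passed to the helper is shown below; the lightbulb service, the entity assertion, and the relative import are illustrative assumptions rather than code from this change set.

from aiohomekit.model import Accessory
from aiohomekit.model.characteristics import CharacteristicsTypes
from aiohomekit.model.services import Service, ServicesTypes

from homeassistant.core import HomeAssistant

# Assumes this test lives alongside tests/components/homekit_controller/common.py.
from .common import setup_test_component


def create_lightbulb_service(accessory: Accessory) -> Service:
    """Define a minimal lightbulb service on the fake accessory."""
    service = accessory.add_service(ServicesTypes.LIGHTBULB)
    on_char = service.add_char(CharacteristicsTypes.ON)
    on_char.value = 0
    return service


async def test_lightbulb_setup(hass: HomeAssistant) -> None:
    """Load the fake accessory and check the resulting entity state."""
    helper = await setup_test_component(hass, 1, create_lightbulb_service)
    state = await helper.poll_and_get_state()
    assert state.state == "off"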
diff --git a/tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json b/tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json new file mode 100644 index 00000000000..65d3126cc4b --- /dev/null +++ b/tests/components/homekit_controller/fixtures/somfy_venetian_blinds.json @@ -0,0 +1,146 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "VELUX Internal Cover", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Netatmo", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "VELUX Internal Cover", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + "iid": 6, + "perms": ["pr"], + "format": "string", + "value": "0.0.0", + "description": "Firmware Revision", + "maxLen": 64 + }, + { + "type": "00000220-0000-1000-8000-0026BB765291", + "iid": 15, + "perms": ["pr"], + "format": "data", + "value": "+nvrOv1cCQU=" + } + ] + }, + { + "iid": 8, + "type": "0000008C-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr"], + "format": "string", + "value": "Venetian Blinds", + "description": "Name", + "maxLen": 64 + }, + { + "type": "0000007C-0000-1000-8000-0026BB765291", + "iid": 11, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Target Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "0000006D-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Current Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "00000072-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Position State", + "minValue": 0, + "maxValue": 2, + "minStep": 1 + }, + { + "type": "0000006C-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr", "ev"], + "format": "int", + "value": 90, + "description": "Current Horizontal Tilt Angle", + "unit": "arcdegrees", + "minValue": -90, + "maxValue": 90, + "minStep": 1 + }, + { + "type": "0000007B-0000-1000-8000-0026BB765291", + "iid": 14, + "perms": ["pr", "pw", "ev"], + "format": "int", + "value": 90, + "description": "Target Horizontal Tilt Angle", + "unit": "arcdegrees", + "minValue": -90, + "maxValue": 90, + "minStep": 1 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json b/tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json new file mode 100644 index 00000000000..80b2b34648e --- /dev/null +++ b/tests/components/homekit_controller/fixtures/velux_active_netatmo_co2.json @@ -0,0 +1,162 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 
1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "VELUX Sensor", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Netatmo", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "VELUX Sensor", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + "iid": 6, + "perms": ["pr"], + "format": "string", + "value": "16.0.0", + "description": "Firmware Revision", + "maxLen": 64 + }, + { + "type": "00000220-0000-1000-8000-0026BB765291", + "iid": 18, + "perms": ["pr"], + "format": "data", + "value": "+nvrOv1cCQU=" + } + ] + }, + { + "iid": 8, + "type": "0000008A-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr"], + "format": "string", + "value": "Temperature sensor", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000011-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr", "ev"], + "format": "float", + "value": 23.9, + "description": "Current Temperature", + "unit": "celsius", + "minValue": 0.0, + "maxValue": 50.0, + "minStep": 0.1 + } + ] + }, + { + "iid": 11, + "type": "00000082-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr"], + "format": "string", + "value": "Humidity sensor", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000010-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr", "ev"], + "format": "float", + "value": 69.0, + "description": "Current Relative Humidity", + "unit": "percentage", + "minValue": 0.0, + "maxValue": 100.0, + "minStep": 1.0 + } + ] + }, + { + "iid": 14, + "type": "00000097-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 15, + "perms": ["pr"], + "format": "string", + "value": "Carbon Dioxide sensor", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000092-0000-1000-8000-0026BB765291", + "iid": 16, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Carbon Dioxide Detected", + "minValue": 0, + "maxValue": 1, + "minStep": 1 + }, + { + "type": "00000093-0000-1000-8000-0026BB765291", + "iid": 17, + "perms": ["pr", "ev"], + "format": "float", + "value": 1124.0, + "description": "Carbon Dioxide Level", + "minValue": 0.0, + "maxValue": 5000.0 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/fixtures/velux_window.json b/tests/components/homekit_controller/fixtures/velux_window.json new file mode 100644 index 00000000000..4d9a09344bb --- /dev/null +++ b/tests/components/homekit_controller/fixtures/velux_window.json @@ -0,0 +1,122 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": 
"00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "VELUX Window", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Netatmo", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "VELUX Window", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + "iid": 6, + "perms": ["pr"], + "format": "string", + "value": "0.0.0", + "description": "Firmware Revision", + "maxLen": 64 + }, + { + "type": "00000220-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr"], + "format": "data", + "value": "+nvrOv1cCQU=" + } + ] + }, + { + "iid": 8, + "type": "0000008B-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr"], + "format": "string", + "value": "Roof Window", + "description": "Name", + "maxLen": 64 + }, + { + "type": "0000007C-0000-1000-8000-0026BB765291", + "iid": 11, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Target Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "0000006D-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Current Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "00000072-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Position State", + "minValue": 0, + "maxValue": 2, + "minStep": 1 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/fixtures/velux_window_cover.json b/tests/components/homekit_controller/fixtures/velux_window_cover.json new file mode 100644 index 00000000000..d95fbbd42bf --- /dev/null +++ b/tests/components/homekit_controller/fixtures/velux_window_cover.json @@ -0,0 +1,122 @@ +[ + { + "aid": 1, + "services": [ + { + "iid": 1, + "type": "0000003E-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 2, + "perms": ["pr"], + "format": "string", + "value": "VELUX External Cover", + "description": "Name", + "maxLen": 64 + }, + { + "type": "00000020-0000-1000-8000-0026BB765291", + "iid": 3, + "perms": ["pr"], + "format": "string", + "value": "Netatmo", + "description": "Manufacturer", + "maxLen": 64 + }, + { + "type": "00000021-0000-1000-8000-0026BB765291", + "iid": 4, + "perms": ["pr"], + "format": "string", + "value": "VELUX External Cover", + "description": "Model", + "maxLen": 64 + }, + { + "type": "00000030-0000-1000-8000-0026BB765291", + "iid": 5, + "perms": ["pr"], + "format": "string", + "value": "**REDACTED**", + "description": "Serial Number", + "maxLen": 64 + }, + { + "type": "00000014-0000-1000-8000-0026BB765291", + "iid": 7, + "perms": ["pw"], + "format": "bool", + "description": "Identify" + }, + { + "type": "00000052-0000-1000-8000-0026BB765291", + 
"iid": 6, + "perms": ["pr"], + "format": "string", + "value": "15.0.0", + "description": "Firmware Revision", + "maxLen": 64 + }, + { + "type": "00000220-0000-1000-8000-0026BB765291", + "iid": 13, + "perms": ["pr"], + "format": "data", + "value": "+nvrOv1cCQU=" + } + ] + }, + { + "iid": 8, + "type": "0000008C-0000-1000-8000-0026BB765291", + "characteristics": [ + { + "type": "00000023-0000-1000-8000-0026BB765291", + "iid": 9, + "perms": ["pr"], + "format": "string", + "value": "Awning Blinds", + "description": "Name", + "maxLen": 64 + }, + { + "type": "0000007C-0000-1000-8000-0026BB765291", + "iid": 11, + "perms": ["pr", "pw", "ev"], + "format": "uint8", + "value": 0, + "description": "Target Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "0000006D-0000-1000-8000-0026BB765291", + "iid": 10, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 0, + "description": "Current Position", + "unit": "percentage", + "minValue": 0, + "maxValue": 100, + "minStep": 1 + }, + { + "type": "00000072-0000-1000-8000-0026BB765291", + "iid": 12, + "perms": ["pr", "ev"], + "format": "uint8", + "value": 2, + "description": "Position State", + "minValue": 0, + "maxValue": 2, + "minStep": 1 + } + ] + } + ] + } +] diff --git a/tests/components/homekit_controller/snapshots/test_init.ambr b/tests/components/homekit_controller/snapshots/test_init.ambr index 2e96295a0ab..6a0fead65d3 100644 --- a/tests/components/homekit_controller/snapshots/test_init.ambr +++ b/tests/components/homekit_controller/snapshots/test_init.ambr @@ -7010,324 +7010,6 @@ }), ]) # --- -# name: test_snapshots[haa_fan] - list([ - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:1', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'José A. 
Jiménez Campos', - 'model': 'RavenSystem HAA', - 'name': 'HAA-C718B3', - 'name_by_user': None, - 'serial_number': 'C718B3-1', - 'suggested_area': None, - 'sw_version': '5.0.18', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_identify', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HAA-C718B3 Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'HAA-C718B3 Identify', - }), - 'entity_id': 'button.haa_c718b3_identify', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_setup', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HAA-C718B3 Setup', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'setup', - 'unique_id': '00:00:00:00:00:00_1_1010_1012', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'HAA-C718B3 Setup', - }), - 'entity_id': 'button.haa_c718b3_setup', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_update', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HAA-C718B3 Update', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_1010_1011', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'update', - 'friendly_name': 'HAA-C718B3 Update', - }), - 'entity_id': 'button.haa_c718b3_update', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': dict({ - 'preset_modes': None, - }), - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'fan', - 'entity_category': None, - 'entity_id': 'fan.haa_c718b3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HAA-C718B3', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 
'supported_features': , - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_1_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'HAA-C718B3', - 'percentage': 66, - 'percentage_step': 33.333333333333336, - 'preset_mode': None, - 'preset_modes': None, - 'supported_features': , - }), - 'entity_id': 'fan.haa_c718b3', - 'state': 'on', - }), - }), - ]), - }), - dict({ - 'device': dict({ - 'area_id': None, - 'config_entries': list([ - 'TestData', - ]), - 'configuration_url': None, - 'connections': list([ - ]), - 'disabled_by': None, - 'entry_type': None, - 'hw_version': '', - 'identifiers': list([ - list([ - 'homekit_controller:accessory-id', - '00:00:00:00:00:00:aid:2', - ]), - ]), - 'is_new': False, - 'labels': list([ - ]), - 'manufacturer': 'José A. Jiménez Campos', - 'model': 'RavenSystem HAA', - 'name': 'HAA-C718B3', - 'name_by_user': None, - 'serial_number': 'C718B3-2', - 'suggested_area': None, - 'sw_version': '5.0.18', - }), - 'entities': list([ - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.haa_c718b3_identify_2', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'HAA-C718B3 Identify', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_1_7', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'device_class': 'identify', - 'friendly_name': 'HAA-C718B3 Identify', - }), - 'entity_id': 'button.haa_c718b3_identify_2', - 'state': 'unknown', - }), - }), - dict({ - 'entry': dict({ - 'aliases': list([ - ]), - 'area_id': None, - 'capabilities': None, - 'categories': dict({ - }), - 'config_entry_id': 'TestData', - 'device_class': None, - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.haa_c718b3', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'labels': list([ - ]), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'HAA-C718B3', - 'platform': 'homekit_controller', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': '00:00:00:00:00:00_2_8', - 'unit_of_measurement': None, - }), - 'state': dict({ - 'attributes': dict({ - 'friendly_name': 'HAA-C718B3', - }), - 'entity_id': 'switch.haa_c718b3', - 'state': 'off', - }), - }), - ]), - }), - ]) -# --- # name: test_snapshots[home_assistant_bridge_basic_cover] list([ dict({ @@ -17954,6 +17636,342 @@ }), ]) # --- +# name: test_snapshots[somfy_venetian_blinds] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 
'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_internal_cover_venetian_blinds', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Venetian Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'current_tilt_position': 100, + 'friendly_name': 'VELUX Internal Cover Venetian Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_internal_cover_venetian_blinds', + 'state': 'closed', + }), + }), + ]), + }), + ]) +# --- +# name: test_snapshots[velux_active_netatmo_co2] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Sensor', + 'model_id': None, + 'name': 'VELUX Sensor', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '16.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_sensor_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_7', + 
'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Sensor Identify', + }), + 'entity_id': 'button.velux_sensor_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Carbon Dioxide sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_14', + 'unit_of_measurement': 'ppm', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', + 'state': '1124.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Humidity sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_11', + 'unit_of_measurement': '%', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'friendly_name': 'VELUX Sensor Humidity sensor', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'state': '69.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Temperature sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': , + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'temperature', + 'friendly_name': 'VELUX Sensor Temperature sensor', + 'state_class': , + 'unit_of_measurement': , + }), + 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'state': '23.9', + }), + }), + ]), + }), + ]) +# --- # name: test_snapshots[velux_gateway] list([ dict({ @@ -18361,6 +18379,1881 @@ }), ]) # --- +# name: test_snapshots[velux_somfy_venetian_blinds] + list([ + 
dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:5', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '15.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_external_cover_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_5_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_5_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:8', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , 
+ 'entity_id': 'button.velux_external_cover_identify_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_8_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify_2', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_8_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 45, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds_2', + 'state': 'open', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:11', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '15.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_external_cover_identify_3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_11_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify_3', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 
'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds_3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_11_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds_3', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:12', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '15.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_external_cover_identify_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_12_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify_4', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_12_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds_4', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': 
list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Gateway', + 'model_id': None, + 'name': 'VELUX Gateway', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '132.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_gateway_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Gateway Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_6', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Gateway Identify', + }), + 'entity_id': 'button.velux_gateway_identify', + 'state': 'unknown', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:9', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_9_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_internal_cover_venetian_blinds', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ 
+ ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Venetian Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_9_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'current_tilt_position': 100, + 'friendly_name': 'VELUX Internal Cover Venetian Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_internal_cover_venetian_blinds', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:13', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_13_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify_2', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_internal_cover_venetian_blinds_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Venetian Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_13_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 100, + 'current_tilt_position': 0, + 'friendly_name': 'VELUX Internal Cover Venetian Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_internal_cover_venetian_blinds_2', + 'state': 'open', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 
'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:14', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify_3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_14_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify_3', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_internal_cover_venetian_blinds_3', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Venetian Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_14_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'current_tilt_position': 100, + 'friendly_name': 'VELUX Internal Cover Venetian Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_internal_cover_venetian_blinds_3', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:15', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Internal Cover', + 'model_id': None, + 'name': 'VELUX Internal Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_internal_cover_identify_4', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), 
+ 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Internal Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_15_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Internal Cover Identify', + }), + 'entity_id': 'button.velux_internal_cover_identify_4', + 'state': 'unknown', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:2', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Sensor', + 'model_id': None, + 'name': 'VELUX Sensor', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '16.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_sensor_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Sensor Identify', + }), + 'entity_id': 'button.velux_sensor_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Carbon Dioxide sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_14', + 'unit_of_measurement': 'ppm', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor', + 'state': '1124.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.velux_sensor_humidity_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Humidity sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_11', + 'unit_of_measurement': '%', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'friendly_name': 'VELUX Sensor Humidity sensor', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'entity_id': 'sensor.velux_sensor_humidity_sensor', + 'state': '69.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Temperature sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_2_8', + 'unit_of_measurement': , + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'temperature', + 'friendly_name': 'VELUX Sensor Temperature sensor', + 'state_class': , + 'unit_of_measurement': , + }), + 'entity_id': 'sensor.velux_sensor_temperature_sensor', + 'state': '23.9', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:3', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Sensor', + 'model_id': None, + 'name': 'VELUX Sensor', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '16.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_sensor_identify_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_3_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Sensor Identify', + }), + 'entity_id': 'button.velux_sensor_identify_2', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 
'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Carbon Dioxide sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_3_14', + 'unit_of_measurement': 'ppm', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'carbon_dioxide', + 'friendly_name': 'VELUX Sensor Carbon Dioxide sensor', + 'state_class': , + 'unit_of_measurement': 'ppm', + }), + 'entity_id': 'sensor.velux_sensor_carbon_dioxide_sensor_2', + 'state': '1074.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_humidity_sensor_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Humidity sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_3_11', + 'unit_of_measurement': '%', + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'humidity', + 'friendly_name': 'VELUX Sensor Humidity sensor', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'entity_id': 'sensor.velux_sensor_humidity_sensor_2', + 'state': '64.0', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.velux_sensor_temperature_sensor_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Sensor Temperature sensor', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_3_8', + 'unit_of_measurement': , + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'temperature', + 'friendly_name': 'VELUX Sensor Temperature sensor', + 'state_class': , + 'unit_of_measurement': , + }), + 'entity_id': 'sensor.velux_sensor_temperature_sensor_2', + 'state': '24.5', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:4', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Window', + 'model_id': None, + 'name': 'VELUX Window', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 
'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_window_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_4_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Window Identify', + }), + 'entity_id': 'button.velux_window_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_window_roof_window', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Roof Window', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_4_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'device_class': 'window', + 'friendly_name': 'VELUX Window Roof Window', + 'supported_features': , + }), + 'entity_id': 'cover.velux_window_roof_window', + 'state': 'closed', + }), + }), + ]), + }), + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:7', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Window', + 'model_id': None, + 'name': 'VELUX Window', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_window_identify_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_7_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Window Identify', + }), + 'entity_id': 'button.velux_window_identify_2', + 
'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_window_roof_window_2', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Roof Window', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_7_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'device_class': 'window', + 'friendly_name': 'VELUX Window Roof Window', + 'supported_features': , + }), + 'entity_id': 'cover.velux_window_roof_window_2', + 'state': 'closed', + }), + }), + ]), + }), + ]) +# --- +# name: test_snapshots[velux_window] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX Window', + 'model_id': None, + 'name': 'VELUX Window', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '0.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_window_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX Window Identify', + }), + 'entity_id': 'button.velux_window_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_window_roof_window', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX Window Roof Window', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'device_class': 'window', + 'friendly_name': 'VELUX Window Roof Window', + 
'supported_features': , + }), + 'entity_id': 'cover.velux_window_roof_window', + 'state': 'closed', + }), + }), + ]), + }), + ]) +# --- +# name: test_snapshots[velux_window_cover] + list([ + dict({ + 'device': dict({ + 'area_id': None, + 'config_entries': list([ + 'TestData', + ]), + 'configuration_url': None, + 'connections': list([ + ]), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': '', + 'identifiers': list([ + list([ + 'homekit_controller:accessory-id', + '00:00:00:00:00:00:aid:1', + ]), + ]), + 'is_new': False, + 'labels': list([ + ]), + 'manufacturer': 'Netatmo', + 'model': 'VELUX External Cover', + 'model_id': None, + 'name': 'VELUX External Cover', + 'name_by_user': None, + 'primary_config_entry': 'TestData', + 'serial_number': '**REDACTED**', + 'suggested_area': None, + 'sw_version': '15.0.0', + }), + 'entities': list([ + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.velux_external_cover_identify', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'VELUX External Cover Identify', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_1_7', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'device_class': 'identify', + 'friendly_name': 'VELUX External Cover Identify', + }), + 'entity_id': 'button.velux_external_cover_identify', + 'state': 'unknown', + }), + }), + dict({ + 'entry': dict({ + 'aliases': list([ + ]), + 'area_id': None, + 'capabilities': None, + 'categories': dict({ + }), + 'config_entry_id': 'TestData', + 'device_class': None, + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.velux_external_cover_awning_blinds', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'labels': list([ + ]), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'VELUX External Cover Awning Blinds', + 'platform': 'homekit_controller', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '00:00:00:00:00:00_1_8', + 'unit_of_measurement': None, + }), + 'state': dict({ + 'attributes': dict({ + 'current_position': 0, + 'friendly_name': 'VELUX External Cover Awning Blinds', + 'supported_features': , + }), + 'entity_id': 'cover.velux_external_cover_awning_blinds', + 'state': 'closed', + }), + }), + ]), + }), + ]) +# --- # name: test_snapshots[vocolinc_flowerbud] list([ dict({ diff --git a/tests/components/homekit_controller/test_alarm_control_panel.py b/tests/components/homekit_controller/test_alarm_control_panel.py index d08478641b3..1e9f023fc46 100644 --- a/tests/components/homekit_controller/test_alarm_control_panel.py +++ b/tests/components/homekit_controller/test_alarm_control_panel.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -11,7 +12,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def 
create_security_system_service(accessory): +def create_security_system_service(accessory: Accessory) -> None: """Define a security-system characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.SECURITY_SYSTEM) diff --git a/tests/components/homekit_controller/test_binary_sensor.py b/tests/components/homekit_controller/test_binary_sensor.py index 63b35fbe1b8..a46d5eca2f5 100644 --- a/tests/components/homekit_controller/test_binary_sensor.py +++ b/tests/components/homekit_controller/test_binary_sensor.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -12,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_motion_sensor_service(accessory): +def create_motion_sensor_service(accessory: Accessory) -> None: """Define motion characteristics as per page 225 of HAP spec.""" service = accessory.add_service(ServicesTypes.MOTION_SENSOR) @@ -43,7 +44,7 @@ async def test_motion_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.MOTION -def create_contact_sensor_service(accessory): +def create_contact_sensor_service(accessory: Accessory) -> None: """Define contact characteristics.""" service = accessory.add_service(ServicesTypes.CONTACT_SENSOR) @@ -74,7 +75,7 @@ async def test_contact_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.OPENING -def create_smoke_sensor_service(accessory): +def create_smoke_sensor_service(accessory: Accessory) -> None: """Define smoke sensor characteristics.""" service = accessory.add_service(ServicesTypes.SMOKE_SENSOR) @@ -105,7 +106,7 @@ async def test_smoke_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.SMOKE -def create_carbon_monoxide_sensor_service(accessory): +def create_carbon_monoxide_sensor_service(accessory: Accessory) -> None: """Define carbon monoxide sensor characteristics.""" service = accessory.add_service(ServicesTypes.CARBON_MONOXIDE_SENSOR) @@ -138,7 +139,7 @@ async def test_carbon_monoxide_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.CO -def create_occupancy_sensor_service(accessory): +def create_occupancy_sensor_service(accessory: Accessory) -> None: """Define occupancy characteristics.""" service = accessory.add_service(ServicesTypes.OCCUPANCY_SENSOR) @@ -169,7 +170,7 @@ async def test_occupancy_sensor_read_state( assert state.attributes["device_class"] == BinarySensorDeviceClass.OCCUPANCY -def create_leak_sensor_service(accessory): +def create_leak_sensor_service(accessory: Accessory) -> None: """Define leak characteristics.""" service = accessory.add_service(ServicesTypes.LEAK_SENSOR) diff --git a/tests/components/homekit_controller/test_button.py b/tests/components/homekit_controller/test_button.py index 058194a7ebd..18391e00df3 100644 --- a/tests/components/homekit_controller/test_button.py +++ b/tests/components/homekit_controller/test_button.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -11,7 +12,7 @@ 
from homeassistant.helpers import entity_registry as er from .common import Helper, setup_test_component -def create_switch_with_setup_button(accessory): +def create_switch_with_setup_button(accessory: Accessory) -> Service: """Define setup button characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -26,7 +27,7 @@ def create_switch_with_setup_button(accessory): return service -def create_switch_with_ecobee_clear_hold_button(accessory): +def create_switch_with_ecobee_clear_hold_button(accessory: Accessory) -> Service: """Define setup button characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) diff --git a/tests/components/homekit_controller/test_camera.py b/tests/components/homekit_controller/test_camera.py index 6e20c1feb3c..1c57d579dc8 100644 --- a/tests/components/homekit_controller/test_camera.py +++ b/tests/components/homekit_controller/test_camera.py @@ -3,6 +3,7 @@ import base64 from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.services import ServicesTypes from aiohomekit.testing import FAKE_CAMERA_IMAGE @@ -13,7 +14,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_camera(accessory): +def create_camera(accessory: Accessory) -> None: """Define camera characteristics.""" accessory.add_service(ServicesTypes.CAMERA_RTP_STREAM_MANAGEMENT) diff --git a/tests/components/homekit_controller/test_climate.py b/tests/components/homekit_controller/test_climate.py index 183e020eb25..29033887953 100644 --- a/tests/components/homekit_controller/test_climate.py +++ b/tests/components/homekit_controller/test_climate.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( ActivationStateValues, CharacteristicsTypes, @@ -28,7 +29,7 @@ from .common import setup_test_component # Test thermostat devices -def create_thermostat_service(accessory): +def create_thermostat_service(accessory: Accessory) -> None: """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) @@ -66,7 +67,7 @@ def create_thermostat_service(accessory): char.value = 0 -def create_thermostat_service_min_max(accessory): +def create_thermostat_service_min_max(accessory: Accessory) -> None: """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) char = service.add_char(CharacteristicsTypes.HEATING_COOLING_TARGET) @@ -86,7 +87,7 @@ async def test_climate_respect_supported_op_modes_1( assert state.attributes["hvac_modes"] == ["off", "heat"] -def create_thermostat_service_valid_vals(accessory): +def create_thermostat_service_valid_vals(accessory: Accessory) -> None: """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) char = service.add_char(CharacteristicsTypes.HEATING_COOLING_TARGET) @@ -364,7 +365,7 @@ async def test_climate_cannot_set_thermostat_temp_range_in_wrong_mode( ) -def create_thermostat_single_set_point_auto(accessory): +def create_thermostat_single_set_point_auto(accessory: Accessory) -> None: """Define thermostat characteristics with a single set point in auto.""" service = accessory.add_service(ServicesTypes.THERMOSTAT) @@ -685,7 +686,7 @@ async def test_hvac_mode_vs_hvac_action_current_mode_wrong( assert state.attributes["hvac_action"] == "idle" -def create_heater_cooler_service(accessory): +def create_heater_cooler_service(accessory: Accessory) -> None: 
"""Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.HEATER_COOLER) @@ -719,7 +720,7 @@ def create_heater_cooler_service(accessory): # Test heater-cooler devices -def create_heater_cooler_service_min_max(accessory): +def create_heater_cooler_service_min_max(accessory: Accessory) -> None: """Define thermostat characteristics.""" service = accessory.add_service(ServicesTypes.HEATER_COOLER) char = service.add_char(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE) @@ -739,7 +740,7 @@ async def test_heater_cooler_respect_supported_op_modes_1( assert state.attributes["hvac_modes"] == ["heat", "cool", "off"] -def create_theater_cooler_service_valid_vals(accessory): +def create_theater_cooler_service_valid_vals(accessory: Accessory) -> None: """Define heater-cooler characteristics.""" service = accessory.add_service(ServicesTypes.HEATER_COOLER) char = service.add_char(CharacteristicsTypes.TARGET_HEATER_COOLER_STATE) diff --git a/tests/components/homekit_controller/test_config_flow.py b/tests/components/homekit_controller/test_config_flow.py index 420c9d45803..8c83d8e4b1b 100644 --- a/tests/components/homekit_controller/test_config_flow.py +++ b/tests/components/homekit_controller/test_config_flow.py @@ -2,6 +2,7 @@ import asyncio from ipaddress import ip_address +from typing import Any import unittest.mock from unittest.mock import AsyncMock, patch @@ -160,7 +161,9 @@ def test_valid_pairing_codes(pairing_code) -> None: assert len(valid_pin[2]) == 3 -def get_flow_context(hass, result): +def get_flow_context( + hass: HomeAssistant, result: config_flow.ConfigFlowResult +) -> dict[str, Any]: """Get the flow context from the result of async_init or async_configure.""" flow = next( flow diff --git a/tests/components/homekit_controller/test_connection.py b/tests/components/homekit_controller/test_connection.py index 8d3cc02fab9..7ea791f9a1e 100644 --- a/tests/components/homekit_controller/test_connection.py +++ b/tests/components/homekit_controller/test_connection.py @@ -5,12 +5,14 @@ import dataclasses from unittest import mock from aiohomekit.controller import TransportType +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from aiohomekit.testing import FakeController import pytest from homeassistant.components.homekit_controller.const import ( + DEBOUNCE_COOLDOWN, DOMAIN, IDENTIFIER_ACCESSORY_ID, IDENTIFIER_LEGACY_ACCESSORY_ID, @@ -21,12 +23,14 @@ from homeassistant.const import STATE_OFF, STATE_UNAVAILABLE from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.helpers.entity_component import async_update_entity from .common import ( setup_accessories_from_file, setup_platform, setup_test_accessories, setup_test_component, + time_changed, ) from tests.common import MockConfigEntry @@ -349,7 +353,7 @@ async def test_poll_firmware_version_only_all_watchable_accessory_mode( ) -> None: """Test that we only poll firmware if available and all chars are watchable accessory mode.""" - def _create_accessory(accessory): + def _create_accessory(accessory: Accessory) -> Service: service = accessory.add_service(ServicesTypes.LIGHTBULB, name="TestDevice") on_char = service.add_char(CharacteristicsTypes.ON) @@ -398,3 +402,40 @@ async def 
test_poll_firmware_version_only_all_watchable_accessory_mode( state = await helper.poll_and_get_state() assert state.state == STATE_OFF assert mock_get_characteristics.call_count == 8 + + +async def test_manual_poll_all_chars( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that a manual poll will check all chars.""" + + def _create_accessory(accessory: Accessory) -> Service: + service = accessory.add_service(ServicesTypes.LIGHTBULB, name="TestDevice") + + on_char = service.add_char(CharacteristicsTypes.ON) + on_char.value = 0 + + brightness = service.add_char(CharacteristicsTypes.BRIGHTNESS) + brightness.value = 0 + + return service + + helper = await setup_test_component(hass, get_next_aid(), _create_accessory) + + with mock.patch.object( + helper.pairing, + "get_characteristics", + wraps=helper.pairing.get_characteristics, + ) as mock_get_characteristics: + # Initial state is that the light is off + await helper.poll_and_get_state() + # Verify only firmware version is polled + assert mock_get_characteristics.call_args_list[0][0][0] == {(1, 7)} + + # Now do a manual poll to ensure all chars are polled + mock_get_characteristics.reset_mock() + await async_update_entity(hass, helper.entity_id) + await time_changed(hass, 60) + await time_changed(hass, DEBOUNCE_COOLDOWN) + await hass.async_block_till_done() + assert len(mock_get_characteristics.call_args_list[0][0][0]) > 1 diff --git a/tests/components/homekit_controller/test_cover.py b/tests/components/homekit_controller/test_cover.py index c819eac1f5a..11870c801e1 100644 --- a/tests/components/homekit_controller/test_cover.py +++ b/tests/components/homekit_controller/test_cover.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.const import STATE_UNAVAILABLE from homeassistant.core import HomeAssistant @@ -12,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_window_covering_service(accessory): +def create_window_covering_service(accessory: Accessory) -> Service: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.WINDOW_COVERING) @@ -37,7 +38,7 @@ def create_window_covering_service(accessory): return service -def create_window_covering_service_with_h_tilt(accessory): +def create_window_covering_service_with_h_tilt(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -52,7 +53,7 @@ def create_window_covering_service_with_h_tilt(accessory): tilt_target.maxValue = 90 -def create_window_covering_service_with_h_tilt_2(accessory): +def create_window_covering_service_with_h_tilt_2(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -67,7 +68,7 @@ def create_window_covering_service_with_h_tilt_2(accessory): tilt_target.maxValue = 0 -def create_window_covering_service_with_v_tilt(accessory): +def create_window_covering_service_with_v_tilt(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -82,7 +83,7 @@ def 
create_window_covering_service_with_v_tilt(accessory): tilt_target.maxValue = 90 -def create_window_covering_service_with_v_tilt_2(accessory): +def create_window_covering_service_with_v_tilt_2(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec.""" service = create_window_covering_service(accessory) @@ -97,7 +98,7 @@ def create_window_covering_service_with_v_tilt_2(accessory): tilt_target.maxValue = 0 -def create_window_covering_service_with_none_tilt(accessory): +def create_window_covering_service_with_none_tilt(accessory: Accessory) -> None: """Define a window-covering characteristics as per page 219 of HAP spec. This accessory uses None for the tilt value unexpectedly. @@ -115,6 +116,32 @@ def create_window_covering_service_with_none_tilt(accessory): tilt_target.maxValue = 0 +def create_window_covering_service_with_no_minmax_tilt(accessory): + """Apply use values (-90 to 90) if min/max not provided.""" + service = create_window_covering_service(accessory) + + tilt_current = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_CURRENT) + tilt_current.value = 0 + + tilt_target = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_TARGET) + tilt_target.value = 0 + + +def create_window_covering_service_with_full_range_tilt(accessory): + """Somfi Velux Integration.""" + service = create_window_covering_service(accessory) + + tilt_current = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_CURRENT) + tilt_current.value = 0 + tilt_current.minValue = -90 + tilt_current.maxValue = 90 + + tilt_target = service.add_char(CharacteristicsTypes.HORIZONTAL_TILT_TARGET) + tilt_target.value = 0 + tilt_target.minValue = -90 + tilt_target.maxValue = 90 + + async def test_change_window_cover_state( hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: @@ -266,6 +293,40 @@ async def test_read_window_cover_tilt_missing_tilt( assert state.state != STATE_UNAVAILABLE +async def test_read_window_cover_tilt_full_range( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that horizontal tilt is handled correctly.""" + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service_with_full_range_tilt + ) + + await helper.async_update( + ServicesTypes.WINDOW_COVERING, + {CharacteristicsTypes.HORIZONTAL_TILT_CURRENT: 0}, + ) + state = await helper.poll_and_get_state() + # Expect converted value from arcdegree scale to percentage scale. + assert state.attributes["current_tilt_position"] == 50 + + +async def test_read_window_cover_tilt_no_minmax( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that horizontal tilt is handled correctly.""" + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service_with_no_minmax_tilt + ) + + await helper.async_update( + ServicesTypes.WINDOW_COVERING, + {CharacteristicsTypes.HORIZONTAL_TILT_CURRENT: 90}, + ) + state = await helper.poll_and_get_state() + # Expect converted value from arcdegree scale to percentage scale. 
+ assert state.attributes["current_tilt_position"] == 100 + + async def test_write_window_cover_tilt_horizontal( hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: @@ -358,6 +419,29 @@ async def test_write_window_cover_tilt_vertical_2( ) +async def test_write_window_cover_tilt_no_minmax( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that horizontal tilt is written correctly.""" + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service_with_no_minmax_tilt + ) + + await hass.services.async_call( + "cover", + "set_cover_tilt_position", + {"entity_id": helper.entity_id, "tilt_position": 90}, + blocking=True, + ) + # Expect converted value from percentage scale to arcdegree scale. + helper.async_assert_service_values( + ServicesTypes.WINDOW_COVERING, + { + CharacteristicsTypes.HORIZONTAL_TILT_TARGET: 72, + }, + ) + + async def test_window_cover_stop( hass: HomeAssistant, get_next_aid: Callable[[], int] ) -> None: @@ -377,7 +461,58 @@ async def test_window_cover_stop( ) -def create_garage_door_opener_service(accessory): +async def test_write_window_cover_tilt_full_range( + hass: HomeAssistant, get_next_aid: Callable[[], int] +) -> None: + """Test that full-range tilt is working correctly.""" + helper = await setup_test_component( + hass, get_next_aid(), create_window_covering_service_with_full_range_tilt + ) + + await hass.services.async_call( + "cover", + "set_cover_tilt_position", + {"entity_id": helper.entity_id, "tilt_position": 10}, + blocking=True, + ) + # Expect converted value from percentage scale to arc on -90 to +90 scale. + helper.async_assert_service_values( + ServicesTypes.WINDOW_COVERING, + { + CharacteristicsTypes.HORIZONTAL_TILT_TARGET: -72, + }, + ) + + await hass.services.async_call( + "cover", + "set_cover_tilt_position", + {"entity_id": helper.entity_id, "tilt_position": 50}, + blocking=True, + ) + # Expect converted value from percentage scale to arc on -90 to +90 scale. + helper.async_assert_service_values( + ServicesTypes.WINDOW_COVERING, + { + CharacteristicsTypes.HORIZONTAL_TILT_TARGET: 0, + }, + ) + + await hass.services.async_call( + "cover", + "set_cover_tilt_position", + {"entity_id": helper.entity_id, "tilt_position": 90}, + blocking=True, + ) + # Expect converted value from percentage scale to arc on -90 to +90 scale. 
+ helper.async_assert_service_values( + ServicesTypes.WINDOW_COVERING, + { + CharacteristicsTypes.HORIZONTAL_TILT_TARGET: 72, + }, + ) + + +def create_garage_door_opener_service(accessory: Accessory) -> None: """Define a garage-door-opener chars as per page 217 of HAP spec.""" service = accessory.add_service(ServicesTypes.GARAGE_DOOR_OPENER) diff --git a/tests/components/homekit_controller/test_device_trigger.py b/tests/components/homekit_controller/test_device_trigger.py index ecf34868b6c..ba952ac5913 100644 --- a/tests/components/homekit_controller/test_device_trigger.py +++ b/tests/components/homekit_controller/test_device_trigger.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes import pytest @@ -25,7 +26,7 @@ def stub_blueprint_populate_autouse(stub_blueprint_populate: None) -> None: """Stub copying the blueprints to the config folder.""" -def create_remote(accessory): +def create_remote(accessory: Accessory) -> None: """Define characteristics for a button (that is inn a group).""" service_label = accessory.add_service(ServicesTypes.SERVICE_LABEL) @@ -50,7 +51,7 @@ def create_remote(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_button(accessory): +def create_button(accessory: Accessory) -> None: """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.STATELESS_PROGRAMMABLE_SWITCH) @@ -65,7 +66,7 @@ def create_button(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_doorbell(accessory): +def create_doorbell(accessory: Accessory) -> None: """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.DOORBELL) diff --git a/tests/components/homekit_controller/test_event.py b/tests/components/homekit_controller/test_event.py index 99dcf38fafc..2254845964a 100644 --- a/tests/components/homekit_controller/test_event.py +++ b/tests/components/homekit_controller/test_event.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -12,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_remote(accessory): +def create_remote(accessory: Accessory) -> None: """Define characteristics for a button (that is inn a group).""" service_label = accessory.add_service(ServicesTypes.SERVICE_LABEL) @@ -37,7 +38,7 @@ def create_remote(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_button(accessory): +def create_button(accessory: Accessory) -> None: """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.STATELESS_PROGRAMMABLE_SWITCH) @@ -52,7 +53,7 @@ def create_button(accessory): battery.add_char(CharacteristicsTypes.BATTERY_LEVEL) -def create_doorbell(accessory): +def create_doorbell(accessory: Accessory) -> None: """Define a button (that is not in a group).""" button = accessory.add_service(ServicesTypes.DOORBELL) diff --git a/tests/components/homekit_controller/test_fan.py b/tests/components/homekit_controller/test_fan.py index 8de447144af..2c498e1a9c1 100644 --- a/tests/components/homekit_controller/test_fan.py +++ b/tests/components/homekit_controller/test_fan.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from 
aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -11,7 +12,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_fan_service(accessory): +def create_fan_service(accessory: Accessory) -> None: """Define fan v1 characteristics as per HAP spec. This service is no longer documented in R2 of the public HAP spec but existing @@ -29,7 +30,7 @@ def create_fan_service(accessory): speed.value = 0 -def create_fanv2_service(accessory): +def create_fanv2_service(accessory: Accessory) -> None: """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -46,7 +47,7 @@ def create_fanv2_service(accessory): swing_mode.value = 0 -def create_fanv2_service_non_standard_rotation_range(accessory): +def create_fanv2_service_non_standard_rotation_range(accessory: Accessory) -> None: """Define fan v2 with a non-standard rotation range.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -60,7 +61,7 @@ def create_fanv2_service_non_standard_rotation_range(accessory): speed.minStep = 1 -def create_fanv2_service_with_min_step(accessory): +def create_fanv2_service_with_min_step(accessory: Accessory) -> None: """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) @@ -78,7 +79,7 @@ def create_fanv2_service_with_min_step(accessory): swing_mode.value = 0 -def create_fanv2_service_without_rotation_speed(accessory): +def create_fanv2_service_without_rotation_speed(accessory: Accessory) -> None: """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) diff --git a/tests/components/homekit_controller/test_humidifier.py b/tests/components/homekit_controller/test_humidifier.py index a031086e93d..4b429959c67 100644 --- a/tests/components/homekit_controller/test_humidifier.py +++ b/tests/components/homekit_controller/test_humidifier.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.components.humidifier import DOMAIN, MODE_AUTO, MODE_NORMAL from homeassistant.core import HomeAssistant @@ -12,7 +13,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_humidifier_service(accessory): +def create_humidifier_service(accessory: Accessory) -> Service: """Define a humidifier characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.HUMIDIFIER_DEHUMIDIFIER) @@ -39,7 +40,7 @@ def create_humidifier_service(accessory): return service -def create_dehumidifier_service(accessory): +def create_dehumidifier_service(accessory: Accessory) -> Service: """Define a dehumidifier characteristics as per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.HUMIDIFIER_DEHUMIDIFIER) diff --git a/tests/components/homekit_controller/test_init.py b/tests/components/homekit_controller/test_init.py index c443e56b3a4..2a017b8d592 100644 --- a/tests/components/homekit_controller/test_init.py +++ b/tests/components/homekit_controller/test_init.py @@ -8,7 +8,7 @@ from unittest.mock import patch from aiohomekit import AccessoryNotFoundError from aiohomekit.model import Accessory, Transport from 
aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from aiohomekit.testing import FakePairing from attr import asdict import pytest @@ -40,7 +40,7 @@ ALIVE_DEVICE_NAME = "testdevice" ALIVE_DEVICE_ENTITY_ID = "light.testdevice" -def create_motion_sensor_service(accessory): +def create_motion_sensor_service(accessory: Accessory) -> None: """Define motion characteristics as per page 225 of HAP spec.""" service = accessory.add_service(ServicesTypes.MOTION_SENSOR) cur_state = service.add_char(CharacteristicsTypes.MOTION_DETECTED) @@ -83,7 +83,7 @@ async def test_async_remove_entry( assert hkid not in hass.data[ENTITY_MAP].storage_data -def create_alive_service(accessory): +def create_alive_service(accessory: Accessory) -> Service: """Create a service to validate we can only remove dead devices.""" service = accessory.add_service(ServicesTypes.LIGHTBULB, name=ALIVE_DEVICE_NAME) service.add_char(CharacteristicsTypes.ON) diff --git a/tests/components/homekit_controller/test_light.py b/tests/components/homekit_controller/test_light.py index 04f4d3f5e29..a4a5b59d5cb 100644 --- a/tests/components/homekit_controller/test_light.py +++ b/tests/components/homekit_controller/test_light.py @@ -3,8 +3,9 @@ from collections.abc import Callable from unittest import mock +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from aiohomekit.testing import FakeController from homeassistant.components.homekit_controller.const import KNOWN_DEVICES @@ -23,7 +24,7 @@ LIGHT_BULB_NAME = "TestDevice" LIGHT_BULB_ENTITY_ID = "light.testdevice" -def create_lightbulb_service(accessory): +def create_lightbulb_service(accessory: Accessory) -> Service: """Define lightbulb characteristics.""" service = accessory.add_service(ServicesTypes.LIGHTBULB, name=LIGHT_BULB_NAME) @@ -36,7 +37,7 @@ def create_lightbulb_service(accessory): return service -def create_lightbulb_service_with_hs(accessory): +def create_lightbulb_service_with_hs(accessory: Accessory) -> Service: """Define a lightbulb service with hue + saturation.""" service = create_lightbulb_service(accessory) @@ -49,7 +50,7 @@ def create_lightbulb_service_with_hs(accessory): return service -def create_lightbulb_service_with_color_temp(accessory): +def create_lightbulb_service_with_color_temp(accessory: Accessory) -> Service: """Define a lightbulb service with color temp.""" service = create_lightbulb_service(accessory) diff --git a/tests/components/homekit_controller/test_lock.py b/tests/components/homekit_controller/test_lock.py index e56ca5fcffe..0963537c7d0 100644 --- a/tests/components/homekit_controller/test_lock.py +++ b/tests/components/homekit_controller/test_lock.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -11,7 +12,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_lock_service(accessory): +def create_lock_service(accessory: Accessory) -> Service: """Define a lock characteristics as 
per page 219 of HAP spec.""" service = accessory.add_service(ServicesTypes.LOCK_MECHANISM) diff --git a/tests/components/homekit_controller/test_media_player.py b/tests/components/homekit_controller/test_media_player.py index a7f900217d7..d1d280ef265 100644 --- a/tests/components/homekit_controller/test_media_player.py +++ b/tests/components/homekit_controller/test_media_player.py @@ -2,11 +2,12 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( CharacteristicPermissions, CharacteristicsTypes, ) -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes import pytest from homeassistant.core import HomeAssistant @@ -15,7 +16,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_tv_service(accessory): +def create_tv_service(accessory: Accessory) -> Service: """Define tv characteristics. The TV is not currently documented publicly - this is based on observing really TV's that have HomeKit support. @@ -53,7 +54,7 @@ def create_tv_service(accessory): return tv_service -def create_tv_service_with_target_media_state(accessory): +def create_tv_service_with_target_media_state(accessory: Accessory) -> Service: """Define a TV service that can play/pause/stop without generate remote events.""" service = create_tv_service(accessory) diff --git a/tests/components/homekit_controller/test_number.py b/tests/components/homekit_controller/test_number.py index fcbcc3ca7a8..243b34cfc75 100644 --- a/tests/components/homekit_controller/test_number.py +++ b/tests/components/homekit_controller/test_number.py @@ -2,8 +2,9 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -11,7 +12,7 @@ from homeassistant.helpers import entity_registry as er from .common import Helper, setup_test_component -def create_switch_with_spray_level(accessory): +def create_switch_with_spray_level(accessory: Accessory) -> Service: """Define battery level characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) diff --git a/tests/components/homekit_controller/test_sensor.py b/tests/components/homekit_controller/test_sensor.py index ad896395e75..c40864c9629 100644 --- a/tests/components/homekit_controller/test_sensor.py +++ b/tests/components/homekit_controller/test_sensor.py @@ -3,10 +3,10 @@ from collections.abc import Callable from unittest.mock import patch -from aiohomekit.model import Transport +from aiohomekit.model import Accessory, Transport from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.characteristics.const import ThreadNodeCapabilities, ThreadStatus -from aiohomekit.model.services import ServicesTypes +from aiohomekit.model.services import Service, ServicesTypes from aiohomekit.protocol.statuscodes import HapStatusCode from aiohomekit.testing import FakePairing import pytest @@ -24,7 +24,7 @@ from .common import TEST_DEVICE_SERVICE_INFO, Helper, setup_test_component from tests.components.bluetooth import inject_bluetooth_service_info -def create_temperature_sensor_service(accessory): +def create_temperature_sensor_service(accessory: Accessory) -> None: """Define temperature 
characteristics.""" service = accessory.add_service(ServicesTypes.TEMPERATURE_SENSOR) @@ -32,7 +32,7 @@ def create_temperature_sensor_service(accessory): cur_state.value = 0 -def create_humidity_sensor_service(accessory): +def create_humidity_sensor_service(accessory: Accessory) -> None: """Define humidity characteristics.""" service = accessory.add_service(ServicesTypes.HUMIDITY_SENSOR) @@ -40,7 +40,7 @@ def create_humidity_sensor_service(accessory): cur_state.value = 0 -def create_light_level_sensor_service(accessory): +def create_light_level_sensor_service(accessory: Accessory) -> None: """Define light level characteristics.""" service = accessory.add_service(ServicesTypes.LIGHT_SENSOR) @@ -48,7 +48,7 @@ def create_light_level_sensor_service(accessory): cur_state.value = 0 -def create_carbon_dioxide_level_sensor_service(accessory): +def create_carbon_dioxide_level_sensor_service(accessory: Accessory) -> None: """Define carbon dioxide level characteristics.""" service = accessory.add_service(ServicesTypes.CARBON_DIOXIDE_SENSOR) @@ -56,7 +56,7 @@ def create_carbon_dioxide_level_sensor_service(accessory): cur_state.value = 0 -def create_battery_level_sensor(accessory): +def create_battery_level_sensor(accessory: Accessory) -> Service: """Define battery level characteristics.""" service = accessory.add_service(ServicesTypes.BATTERY_SERVICE) @@ -280,7 +280,7 @@ async def test_battery_low( assert state.attributes["icon"] == "mdi:battery-alert" -def create_switch_with_sensor(accessory): +def create_switch_with_sensor(accessory: Accessory) -> Service: """Define battery level characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) diff --git a/tests/components/homekit_controller/test_storage.py b/tests/components/homekit_controller/test_storage.py index ab7d7afd6fe..97856c2c784 100644 --- a/tests/components/homekit_controller/test_storage.py +++ b/tests/components/homekit_controller/test_storage.py @@ -3,6 +3,7 @@ from collections.abc import Callable from typing import Any +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes @@ -65,7 +66,7 @@ async def test_storage_is_removed_idempotent(hass: HomeAssistant) -> None: assert hkid not in entity_map.storage_data -def create_lightbulb_service(accessory): +def create_lightbulb_service(accessory: Accessory) -> None: """Define lightbulb characteristics.""" service = accessory.add_service(ServicesTypes.LIGHTBULB) on_char = service.add_char(CharacteristicsTypes.ON) diff --git a/tests/components/homekit_controller/test_switch.py b/tests/components/homekit_controller/test_switch.py index 1fc49c5c636..a2586f7355e 100644 --- a/tests/components/homekit_controller/test_switch.py +++ b/tests/components/homekit_controller/test_switch.py @@ -2,6 +2,7 @@ from collections.abc import Callable +from aiohomekit.model import Accessory from aiohomekit.model.characteristics import ( CharacteristicsTypes, InUseValues, @@ -15,7 +16,7 @@ from homeassistant.helpers import entity_registry as er from .common import setup_test_component -def create_switch_service(accessory): +def create_switch_service(accessory: Accessory) -> None: """Define outlet characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) @@ -26,7 +27,7 @@ def create_switch_service(accessory): outlet_in_use.value = False -def create_valve_service(accessory): +def create_valve_service(accessory: Accessory) -> None: """Define valve characteristics.""" service = 
accessory.add_service(ServicesTypes.VALVE) @@ -43,7 +44,7 @@ def create_valve_service(accessory): remaining.value = 99 -def create_char_switch_service(accessory): +def create_char_switch_service(accessory: Accessory) -> None: """Define swtch characteristics.""" service = accessory.add_service(ServicesTypes.OUTLET) diff --git a/tests/components/homematicip_cloud/conftest.py b/tests/components/homematicip_cloud/conftest.py index a43a342478b..ad3957fea69 100644 --- a/tests/components/homematicip_cloud/conftest.py +++ b/tests/components/homematicip_cloud/conftest.py @@ -8,7 +8,6 @@ from homematicip.aio.home import AsyncHome from homematicip.base.enums import WeatherCondition, WeatherDayTime import pytest -from homeassistant import config_entries from homeassistant.components.homematicip_cloud import ( DOMAIN as HMIPC_DOMAIN, async_setup as hmip_async_setup, @@ -46,7 +45,7 @@ def mock_connection_fixture() -> AsyncConnection: @pytest.fixture(name="hmip_config_entry") -def hmip_config_entry_fixture() -> config_entries.ConfigEntry: +def hmip_config_entry_fixture() -> MockConfigEntry: """Create a mock config entry for homematic ip cloud.""" entry_data = { HMIPC_HAPID: HAPID, @@ -66,8 +65,8 @@ def hmip_config_entry_fixture() -> config_entries.ConfigEntry: @pytest.fixture(name="default_mock_hap_factory") async def default_mock_hap_factory_fixture( - hass: HomeAssistant, mock_connection, hmip_config_entry -) -> HomematicipHAP: + hass: HomeAssistant, mock_connection, hmip_config_entry: MockConfigEntry +) -> HomeFactory: """Create a mocked homematic access point.""" return HomeFactory(hass, mock_connection, hmip_config_entry) @@ -94,7 +93,7 @@ def dummy_config_fixture() -> ConfigType: @pytest.fixture(name="mock_hap_with_service") async def mock_hap_with_service_fixture( - hass: HomeAssistant, default_mock_hap_factory, dummy_config + hass: HomeAssistant, default_mock_hap_factory: HomeFactory, dummy_config ) -> HomematicipHAP: """Create a fake homematic access point with hass services.""" mock_hap = await default_mock_hap_factory.async_get_mock_hap() diff --git a/tests/components/homematicip_cloud/helper.py b/tests/components/homematicip_cloud/helper.py index e7d7350f98e..229b3c20251 100644 --- a/tests/components/homematicip_cloud/helper.py +++ b/tests/components/homematicip_cloud/helper.py @@ -1,6 +1,7 @@ """Helper for HomematicIP Cloud Tests.""" import json +from typing import Any from unittest.mock import Mock, patch from homematicip.aio.class_maps import ( @@ -11,19 +12,19 @@ from homematicip.aio.class_maps import ( from homematicip.aio.device import AsyncDevice from homematicip.aio.group import AsyncGroup from homematicip.aio.home import AsyncHome +from homematicip.base.homematicip_object import HomeMaticIPObject from homematicip.home import Home -from homeassistant import config_entries from homeassistant.components.homematicip_cloud import DOMAIN as HMIPC_DOMAIN from homeassistant.components.homematicip_cloud.generic_entity import ( ATTR_IS_GROUP, ATTR_MODEL_TYPE, ) from homeassistant.components.homematicip_cloud.hap import HomematicipHAP -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.setup import async_setup_component -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture HAPID = "3014F7110000000000000001" HAPPIN = "5678" @@ -31,7 +32,13 @@ AUTH_TOKEN = "1234" FIXTURE_DATA = load_fixture("homematicip_cloud.json", "homematicip_cloud") -def get_and_check_entity_basics(hass, mock_hap, 
entity_id, entity_name, device_model): +def get_and_check_entity_basics( + hass: HomeAssistant, + mock_hap: HomematicipHAP, + entity_id: str, + entity_name: str, + device_model: str | None, +) -> tuple[State, HomeMaticIPObject | None]: """Get and test basic device.""" ha_state = hass.states.get(entity_id) assert ha_state is not None @@ -50,7 +57,12 @@ def get_and_check_entity_basics(hass, mock_hap, entity_id, entity_name, device_m async def async_manipulate_test_data( - hass, hmip_device, attribute, new_value, channel=1, fire_device=None + hass: HomeAssistant, + hmip_device: HomeMaticIPObject, + attribute: str, + new_value: Any, + channel: int = 1, + fire_device: HomeMaticIPObject | None = None, ): """Set new value on hmip device.""" if channel == 1: @@ -76,7 +88,7 @@ class HomeFactory: self, hass: HomeAssistant, mock_connection, - hmip_config_entry: config_entries.ConfigEntry, + hmip_config_entry: MockConfigEntry, ) -> None: """Initialize the Factory.""" self.hass = hass @@ -132,7 +144,7 @@ class HomeTemplate(Home): def __init__( self, connection=None, home_name="", test_devices=None, test_groups=None - ): + ) -> None: """Init template with connection.""" super().__init__(connection=connection) self.name = home_name diff --git a/tests/components/homematicip_cloud/test_alarm_control_panel.py b/tests/components/homematicip_cloud/test_alarm_control_panel.py index 05d7963cea8..cf27aed7a84 100644 --- a/tests/components/homematicip_cloud/test_alarm_control_panel.py +++ b/tests/components/homematicip_cloud/test_alarm_control_panel.py @@ -1,5 +1,7 @@ """Tests for HomematicIP Cloud alarm control panel.""" +from homematicip.aio.home import AsyncHome + from homeassistant.components.alarm_control_panel import ( DOMAIN as ALARM_CONTROL_PANEL_DOMAIN, ) @@ -13,12 +15,16 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import get_and_check_entity_basics +from .helper import HomeFactory, get_and_check_entity_basics async def _async_manipulate_security_zones( - hass, home, internal_active=False, external_active=False, alarm_triggered=False -): + hass: HomeAssistant, + home: AsyncHome, + internal_active: bool = False, + external_active: bool = False, + alarm_triggered: bool = False, +) -> None: """Set new values on hmip security zones.""" json = home._rawJSONData json["functionalHomes"]["SECURITY_AND_ALARM"]["alarmActive"] = alarm_triggered @@ -50,7 +56,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_alarm_control_panel( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipAlarmControlPanel.""" entity_id = "alarm_control_panel.hmip_alarm_control_panel" diff --git a/tests/components/homematicip_cloud/test_binary_sensor.py b/tests/components/homematicip_cloud/test_binary_sensor.py index 54f8e2141d2..d6ea33ed5fb 100644 --- a/tests/components/homematicip_cloud/test_binary_sensor.py +++ b/tests/components/homematicip_cloud/test_binary_sensor.py @@ -27,7 +27,7 @@ from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -41,7 +41,7 @@ 
async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_home_cloud_connection_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCloudConnectionSensor.""" entity_id = "binary_sensor.cloud_connection" @@ -64,7 +64,7 @@ async def test_hmip_home_cloud_connection_sensor( async def test_hmip_acceleration_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipAccelerationSensor.""" entity_id = "binary_sensor.garagentor" @@ -103,7 +103,7 @@ async def test_hmip_acceleration_sensor( async def test_hmip_tilt_vibration_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTiltVibrationSensor.""" entity_id = "binary_sensor.garage_neigungs_und_erschutterungssensor" @@ -141,7 +141,7 @@ async def test_hmip_tilt_vibration_sensor( async def test_hmip_contact_interface( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipContactInterface.""" entity_id = "binary_sensor.kontakt_schnittstelle_unterputz_1_fach" @@ -166,7 +166,7 @@ async def test_hmip_contact_interface( async def test_hmip_shutter_contact( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipShutterContact.""" entity_id = "binary_sensor.fenstergriffsensor" @@ -208,7 +208,7 @@ async def test_hmip_shutter_contact( async def test_hmip_shutter_contact_optical( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipShutterContact.""" entity_id = "binary_sensor.sitzplatzture" @@ -240,7 +240,7 @@ async def test_hmip_shutter_contact_optical( async def test_hmip_motion_detector( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipMotionDetector.""" entity_id = "binary_sensor.bewegungsmelder_fur_55er_rahmen_innen" @@ -261,7 +261,7 @@ async def test_hmip_motion_detector( async def test_hmip_presence_detector( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipPresenceDetector.""" entity_id = "binary_sensor.spi_1" @@ -287,7 +287,7 @@ async def test_hmip_presence_detector( async def test_hmip_pluggable_mains_failure_surveillance_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipPresenceDetector.""" entity_id = "binary_sensor.netzausfalluberwachung" @@ -308,7 +308,7 @@ async def test_hmip_pluggable_mains_failure_surveillance_sensor( async def test_hmip_smoke_detector( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSmokeDetector.""" entity_id = "binary_sensor.rauchwarnmelder" @@ -342,7 +342,7 @@ async def test_hmip_smoke_detector( async def test_hmip_water_detector( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipWaterDetector.""" entity_id = "binary_sensor.wassersensor" @@ -378,7 +378,9 @@ async def test_hmip_water_detector( assert ha_state.state 
== STATE_OFF -async def test_hmip_storm_sensor(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_storm_sensor( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipStormSensor.""" entity_id = "binary_sensor.weather_sensor_plus_storm" entity_name = "Weather Sensor – plus Storm" @@ -397,7 +399,9 @@ async def test_hmip_storm_sensor(hass: HomeAssistant, default_mock_hap_factory) assert ha_state.state == STATE_ON -async def test_hmip_rain_sensor(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_rain_sensor( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipRainSensor.""" entity_id = "binary_sensor.wettersensor_pro_raining" entity_name = "Wettersensor - pro Raining" @@ -417,7 +421,7 @@ async def test_hmip_rain_sensor(hass: HomeAssistant, default_mock_hap_factory) - async def test_hmip_sunshine_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSunshineSensor.""" entity_id = "binary_sensor.wettersensor_pro_sunshine" @@ -439,7 +443,7 @@ async def test_hmip_sunshine_sensor( async def test_hmip_battery_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSunshineSensor.""" entity_id = "binary_sensor.wohnungsture_battery" @@ -460,7 +464,7 @@ async def test_hmip_battery_sensor( async def test_hmip_security_zone_sensor_group( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSecurityZoneSensorGroup.""" entity_id = "binary_sensor.internal_securityzone" @@ -497,7 +501,7 @@ async def test_hmip_security_zone_sensor_group( async def test_hmip_security_sensor_group( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSecuritySensorGroup.""" entity_id = "binary_sensor.buro_sensors" @@ -571,7 +575,7 @@ async def test_hmip_security_sensor_group( async def test_hmip_multi_contact_interface( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipMultiContactInterface.""" entity_id = "binary_sensor.wired_eingangsmodul_32_fach_channel5" diff --git a/tests/components/homematicip_cloud/test_button.py b/tests/components/homematicip_cloud/test_button.py index 0b5e81dd703..7da86607096 100644 --- a/tests/components/homematicip_cloud/test_button.py +++ b/tests/components/homematicip_cloud/test_button.py @@ -7,11 +7,13 @@ from homeassistant.const import ATTR_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.util import dt as dt_util -from .helper import get_and_check_entity_basics +from .helper import HomeFactory, get_and_check_entity_basics async def test_hmip_garage_door_controller_button( - hass: HomeAssistant, freezer: FrozenDateTimeFactory, default_mock_hap_factory + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + default_mock_hap_factory: HomeFactory, ) -> None: """Test HomematicipGarageDoorControllerButton.""" entity_id = "button.garagentor" diff --git a/tests/components/homematicip_cloud/test_climate.py b/tests/components/homematicip_cloud/test_climate.py index f175e2060df..c059ed4b744 100644 --- a/tests/components/homematicip_cloud/test_climate.py +++ 
b/tests/components/homematicip_cloud/test_climate.py @@ -28,7 +28,12 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError from homeassistant.setup import async_setup_component -from .helper import HAPID, async_manipulate_test_data, get_and_check_entity_basics +from .helper import ( + HAPID, + HomeFactory, + async_manipulate_test_data, + get_and_check_entity_basics, +) async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -40,7 +45,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_heating_group_heat( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup.""" entity_id = "climate.badezimmer" @@ -257,7 +262,7 @@ async def test_hmip_heating_group_heat( async def test_hmip_heating_group_cool( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup.""" entity_id = "climate.badezimmer" @@ -380,7 +385,7 @@ async def test_hmip_heating_group_cool( async def test_hmip_heating_group_heat_with_switch( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup.""" entity_id = "climate.schlafzimmer" @@ -411,7 +416,7 @@ async def test_hmip_heating_group_heat_with_switch( async def test_hmip_heating_group_heat_with_radiator( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup.""" entity_id = "climate.vorzimmer" @@ -440,7 +445,7 @@ async def test_hmip_heating_group_heat_with_radiator( async def test_hmip_heating_profile_default_name( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test visible profile 1 without a name should be displayed as 'Default'.""" entity_id = "climate.vorzimmer3" @@ -465,7 +470,7 @@ async def test_hmip_heating_profile_default_name( async def test_hmip_heating_profile_naming( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test Heating Profile Naming.""" entity_id = "climate.vorzimmer2" @@ -490,7 +495,7 @@ async def test_hmip_heating_profile_naming( async def test_hmip_heating_profile_name_not_in_list( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test set profile when profile is not in available profiles.""" expected_profile = "Testprofile" @@ -622,20 +627,69 @@ async def test_hmip_climate_services( assert len(home._connection.mock_calls) == 10 not_existing_hap_id = "5555F7110000000000000001" - await hass.services.async_call( - "homematicip_cloud", - "deactivate_vacation", - {"accesspoint_id": not_existing_hap_id}, - blocking=True, - ) - assert home.mock_calls[-1][0] == "deactivate_vacation" - assert home.mock_calls[-1][1] == () + with pytest.raises(ServiceValidationError) as excinfo: + await hass.services.async_call( + "homematicip_cloud", + "deactivate_vacation", + {"accesspoint_id": not_existing_hap_id}, + blocking=True, + ) + assert excinfo.value.translation_domain == HMIPC_DOMAIN + assert excinfo.value.translation_key == "access_point_not_found" # There is no further call on connection. 
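# Editor's sketch (not part of the patch): the hunk above switches the
# "unknown access point" path of deactivate_vacation (and the new
# set_home_cooling_mode test below) from silently hitting the mock to expecting
# a translated ServiceValidationError. A minimal standalone illustration of
# that assertion pattern, assuming the integration's usual test harness
# (`hass` plus the `default_mock_hap_factory` fixture from helper.py):
import pytest

from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ServiceValidationError


async def test_unknown_accesspoint_raises_sketch(
    hass: HomeAssistant, default_mock_hap_factory
) -> None:
    """Sketch: an unknown accesspoint_id surfaces as ServiceValidationError."""
    await default_mock_hap_factory.async_get_mock_hap()

    with pytest.raises(ServiceValidationError) as excinfo:
        await hass.services.async_call(
            "homematicip_cloud",
            "deactivate_vacation",
            # Hypothetical id that is not registered with any loaded HAP.
            {"accesspoint_id": "5555F7110000000000000001"},
            blocking=True,
        )

    # translation_domain/translation_key drive the user-facing error message.
    assert excinfo.value.translation_domain == "homematicip_cloud"
    assert excinfo.value.translation_key == "access_point_not_found"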
assert len(home._connection.mock_calls) == 10 +async def test_hmip_set_home_cooling_mode( + hass: HomeAssistant, mock_hap_with_service +) -> None: + """Test HomematicipSetHomeCoolingMode.""" + + home = mock_hap_with_service.home + + await hass.services.async_call( + "homematicip_cloud", + "set_home_cooling_mode", + {"accesspoint_id": HAPID, "cooling": False}, + blocking=True, + ) + assert home.mock_calls[-1][0] == "set_cooling" + assert home.mock_calls[-1][1] == (False,) + assert len(home._connection.mock_calls) == 1 + + await hass.services.async_call( + "homematicip_cloud", + "set_home_cooling_mode", + {"accesspoint_id": HAPID, "cooling": True}, + blocking=True, + ) + assert home.mock_calls[-1][0] == "set_cooling" + assert home.mock_calls[-1][1] + assert len(home._connection.mock_calls) == 2 + + await hass.services.async_call( + "homematicip_cloud", "set_home_cooling_mode", blocking=True + ) + assert home.mock_calls[-1][0] == "set_cooling" + assert home.mock_calls[-1][1] + assert len(home._connection.mock_calls) == 3 + + not_existing_hap_id = "5555F7110000000000000001" + with pytest.raises(ServiceValidationError) as excinfo: + await hass.services.async_call( + "homematicip_cloud", + "set_home_cooling_mode", + {"accesspoint_id": not_existing_hap_id, "cooling": True}, + blocking=True, + ) + assert excinfo.value.translation_domain == HMIPC_DOMAIN + assert excinfo.value.translation_key == "access_point_not_found" + # There is no further call on connection. + assert len(home._connection.mock_calls) == 3 + + async def test_hmip_heating_group_services( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingGroup services.""" entity_id = "climate.badezimmer" diff --git a/tests/components/homematicip_cloud/test_cover.py b/tests/components/homematicip_cloud/test_cover.py index ee126dff936..4d32ae547ef 100644 --- a/tests/components/homematicip_cloud/test_cover.py +++ b/tests/components/homematicip_cloud/test_cover.py @@ -12,7 +12,7 @@ from homeassistant.const import STATE_CLOSED, STATE_OPEN, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -24,7 +24,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_cover_shutter( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverShutte.""" entity_id = "cover.broll_1" @@ -90,7 +90,9 @@ async def test_hmip_cover_shutter( assert ha_state.state == STATE_UNKNOWN -async def test_hmip_cover_slats(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_cover_slats( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipCoverSlats.""" entity_id = "cover.sofa_links" entity_name = "Sofa links" @@ -165,7 +167,7 @@ async def test_hmip_cover_slats(hass: HomeAssistant, default_mock_hap_factory) - async def test_hmip_multi_cover_slats( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverSlats.""" entity_id = "cover.wohnzimmer_fenster" @@ -244,7 +246,9 @@ async def test_hmip_multi_cover_slats( assert 
ha_state.state == STATE_UNKNOWN -async def test_hmip_blind_module(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_blind_module( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipBlindModule.""" entity_id = "cover.sonnenschutz_balkontur" entity_name = "Sonnenschutz Balkontür" @@ -355,7 +359,7 @@ async def test_hmip_blind_module(hass: HomeAssistant, default_mock_hap_factory) async def test_hmip_garage_door_tormatic( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverShutte.""" entity_id = "cover.garage_door_module" @@ -404,7 +408,7 @@ async def test_hmip_garage_door_tormatic( async def test_hmip_garage_door_hoermann( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverShutte.""" entity_id = "cover.garage_door" @@ -453,7 +457,7 @@ async def test_hmip_garage_door_hoermann( async def test_hmip_cover_shutter_group( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipCoverShutteGroup.""" entity_id = "cover.rollos_shuttergroup" @@ -518,7 +522,7 @@ async def test_hmip_cover_shutter_group( async def test_hmip_cover_slats_group( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test slats with HomematicipCoverShutteGroup.""" entity_id = "cover.rollos_shuttergroup" diff --git a/tests/components/homematicip_cloud/test_device.py b/tests/components/homematicip_cloud/test_device.py index 074a30e94b2..25fb31c3c62 100644 --- a/tests/components/homematicip_cloud/test_device.py +++ b/tests/components/homematicip_cloud/test_device.py @@ -17,9 +17,11 @@ from .helper import ( get_and_check_entity_basics, ) +from tests.common import MockConfigEntry + async def test_hmip_load_all_supported_devices( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Ensure that all supported devices could be loaded.""" mock_hap = await default_mock_hap_factory.async_get_mock_hap( @@ -33,7 +35,7 @@ async def test_hmip_remove_device( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory, + default_mock_hap_factory: HomeFactory, ) -> None: """Test Remove of hmip device.""" entity_id = "light.treppe_ch" @@ -67,8 +69,8 @@ async def test_hmip_add_device( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory, - hmip_config_entry, + default_mock_hap_factory: HomeFactory, + hmip_config_entry: MockConfigEntry, ) -> None: """Test Remove of hmip device.""" entity_id = "light.treppe_ch" @@ -121,7 +123,7 @@ async def test_hmip_remove_group( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory, + default_mock_hap_factory: HomeFactory, ) -> None: """Test Remove of hmip group.""" entity_id = "switch.strom_group" @@ -149,7 +151,7 @@ async def test_hmip_remove_group( async def test_all_devices_unavailable_when_hap_not_connected( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test make all devices unavaulable when hap is not connected.""" entity_id = "light.treppe_ch" @@ -174,7 +176,9 @@ async def 
test_all_devices_unavailable_when_hap_not_connected( assert ha_state.state == STATE_UNAVAILABLE -async def test_hap_reconnected(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hap_reconnected( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test reconnect hap.""" entity_id = "light.treppe_ch" entity_name = "Treppe CH" @@ -205,7 +209,7 @@ async def test_hap_reconnected(hass: HomeAssistant, default_mock_hap_factory) -> async def test_hap_with_name( - hass: HomeAssistant, mock_connection, hmip_config_entry + hass: HomeAssistant, mock_connection, hmip_config_entry: MockConfigEntry ) -> None: """Test hap with name.""" home_name = "TestName" @@ -232,7 +236,7 @@ async def test_hap_with_name( async def test_hmip_reset_energy_counter_services( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test reset_energy_counter service.""" entity_id = "switch.pc" @@ -267,7 +271,7 @@ async def test_hmip_multi_area_device( hass: HomeAssistant, entity_registry: er.EntityRegistry, device_registry: dr.DeviceRegistry, - default_mock_hap_factory, + default_mock_hap_factory: HomeFactory, ) -> None: """Test multi area device. Check if devices are created and referenced.""" entity_id = "binary_sensor.wired_eingangsmodul_32_fach_channel5" diff --git a/tests/components/homematicip_cloud/test_hap.py b/tests/components/homematicip_cloud/test_hap.py index 2da32b2844d..ded1bf88292 100644 --- a/tests/components/homematicip_cloud/test_hap.py +++ b/tests/components/homematicip_cloud/test_hap.py @@ -22,7 +22,7 @@ from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant from homeassistant.exceptions import ConfigEntryNotReady -from .helper import HAPID, HAPPIN +from .helper import HAPID, HAPPIN, HomeFactory from tests.common import MockConfigEntry @@ -114,7 +114,7 @@ async def test_hap_setup_connection_error() -> None: async def test_hap_reset_unloads_entry_if_setup( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test calling reset while the entry has been setup.""" mock_hap = await default_mock_hap_factory.async_get_mock_hap() @@ -129,7 +129,7 @@ async def test_hap_reset_unloads_entry_if_setup( async def test_hap_create( - hass: HomeAssistant, hmip_config_entry, simple_mock_home + hass: HomeAssistant, hmip_config_entry: MockConfigEntry, simple_mock_home ) -> None: """Mock AsyncHome to execute get_hap.""" hass.config.components.add(HMIPC_DOMAIN) @@ -141,7 +141,7 @@ async def test_hap_create( async def test_hap_create_exception( - hass: HomeAssistant, hmip_config_entry, mock_connection_init + hass: HomeAssistant, hmip_config_entry: MockConfigEntry, mock_connection_init ) -> None: """Mock AsyncHome to execute get_hap.""" hass.config.components.add(HMIPC_DOMAIN) diff --git a/tests/components/homematicip_cloud/test_init.py b/tests/components/homematicip_cloud/test_init.py index 9303a755e89..07c53248d92 100644 --- a/tests/components/homematicip_cloud/test_init.py +++ b/tests/components/homematicip_cloud/test_init.py @@ -100,7 +100,7 @@ async def test_config_already_registered_not_passed_to_config_entry( async def test_load_entry_fails_due_to_connection_error( - hass: HomeAssistant, hmip_config_entry, mock_connection_init + hass: HomeAssistant, hmip_config_entry: MockConfigEntry, mock_connection_init ) -> None: """Test load entry fails due to connection error.""" 
hmip_config_entry.add_to_hass(hass) @@ -116,7 +116,7 @@ async def test_load_entry_fails_due_to_connection_error( async def test_load_entry_fails_due_to_generic_exception( - hass: HomeAssistant, hmip_config_entry + hass: HomeAssistant, hmip_config_entry: MockConfigEntry ) -> None: """Test load entry fails due to generic exception.""" hmip_config_entry.add_to_hass(hass) @@ -199,7 +199,7 @@ async def test_setup_services_and_unload_services(hass: HomeAssistant) -> None: # Check services are created hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 8 + assert len(hmipc_services) == 9 config_entries = hass.config_entries.async_entries(HMIPC_DOMAIN) assert len(config_entries) == 1 @@ -232,7 +232,7 @@ async def test_setup_two_haps_unload_one_by_one(hass: HomeAssistant) -> None: assert await async_setup_component(hass, HMIPC_DOMAIN, {}) hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 8 + assert len(hmipc_services) == 9 config_entries = hass.config_entries.async_entries(HMIPC_DOMAIN) assert len(config_entries) == 2 @@ -241,7 +241,7 @@ async def test_setup_two_haps_unload_one_by_one(hass: HomeAssistant) -> None: # services still exists hmipc_services = hass.services.async_services()[HMIPC_DOMAIN] - assert len(hmipc_services) == 8 + assert len(hmipc_services) == 9 # unload the second AP await hass.config_entries.async_unload(config_entries[1].entry_id) diff --git a/tests/components/homematicip_cloud/test_light.py b/tests/components/homematicip_cloud/test_light.py index 18f002a5dbc..18d490c3786 100644 --- a/tests/components/homematicip_cloud/test_light.py +++ b/tests/components/homematicip_cloud/test_light.py @@ -16,7 +16,7 @@ from homeassistant.const import ATTR_SUPPORTED_FEATURES, STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -27,7 +27,9 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: assert not hass.data.get(HMIPC_DOMAIN) -async def test_hmip_light(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_light( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipLight.""" entity_id = "light.treppe_ch" entity_name = "Treppe CH" @@ -73,7 +75,7 @@ async def test_hmip_light(hass: HomeAssistant, default_mock_hap_factory) -> None async def test_hmip_notification_light( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipNotificationLight.""" entity_id = "light.alarm_status" @@ -171,7 +173,9 @@ async def test_hmip_notification_light( assert not ha_state.attributes.get(ATTR_BRIGHTNESS) -async def test_hmip_dimmer(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_dimmer( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipDimmer.""" entity_id = "light.schlafzimmerlicht" entity_name = "Schlafzimmerlicht" @@ -230,7 +234,7 @@ async def test_hmip_dimmer(hass: HomeAssistant, default_mock_hap_factory) -> Non async def test_hmip_light_measuring( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test 
HomematicipLightMeasuring.""" entity_id = "light.flur_oben" @@ -276,7 +280,7 @@ async def test_hmip_light_measuring( async def test_hmip_wired_multi_dimmer( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipMultiDimmer.""" entity_id = "light.raumlich_kuche" @@ -336,7 +340,7 @@ async def test_hmip_wired_multi_dimmer( async def test_hmip_din_rail_dimmer_3_channel1( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicIP DinRailDimmer3 Channel 1.""" entity_id = "light.3_dimmer_channel1" @@ -395,7 +399,7 @@ async def test_hmip_din_rail_dimmer_3_channel1( async def test_hmip_din_rail_dimmer_3_channel2( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicIP DinRailDimmer3 Channel 2.""" entity_id = "light.3_dimmer_channel2" @@ -454,7 +458,7 @@ async def test_hmip_din_rail_dimmer_3_channel2( async def test_hmip_din_rail_dimmer_3_channel3( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicIP DinRailDimmer3 Channel 3.""" entity_id = "light.esstisch" diff --git a/tests/components/homematicip_cloud/test_lock.py b/tests/components/homematicip_cloud/test_lock.py index f49ad42b013..7035cf979c4 100644 --- a/tests/components/homematicip_cloud/test_lock.py +++ b/tests/components/homematicip_cloud/test_lock.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -29,7 +29,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_doorlockdrive( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipDoorLockDrive.""" entity_id = "lock.haustuer" @@ -87,7 +87,7 @@ async def test_hmip_doorlockdrive( async def test_hmip_doorlockdrive_handle_errors( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipDoorLockDrive.""" entity_id = "lock.haustuer" diff --git a/tests/components/homematicip_cloud/test_sensor.py b/tests/components/homematicip_cloud/test_sensor.py index 2b62c46fd72..4028f6d189e 100644 --- a/tests/components/homematicip_cloud/test_sensor.py +++ b/tests/components/homematicip_cloud/test_sensor.py @@ -36,7 +36,7 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -48,7 +48,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_accesspoint_status( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSwitch.""" entity_id = 
"sensor.home_control_access_point_duty_cycle" @@ -67,7 +67,7 @@ async def test_hmip_accesspoint_status( async def test_hmip_heating_thermostat( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingThermostat.""" entity_id = "sensor.heizkorperthermostat_heating" @@ -103,7 +103,7 @@ async def test_hmip_heating_thermostat( async def test_hmip_humidity_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHumiditySensor.""" entity_id = "sensor.bwth_1_humidity" @@ -128,7 +128,7 @@ async def test_hmip_humidity_sensor( async def test_hmip_temperature_sensor1( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.bwth_1_temperature" @@ -155,7 +155,7 @@ async def test_hmip_temperature_sensor1( async def test_hmip_temperature_sensor2( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.heizkorperthermostat_temperature" @@ -182,7 +182,7 @@ async def test_hmip_temperature_sensor2( async def test_hmip_temperature_sensor3( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.raumbediengerat_analog_temperature" @@ -209,7 +209,7 @@ async def test_hmip_temperature_sensor3( async def test_hmip_thermostat_evo_heating( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipHeatingThermostat for HmIP-eTRV-E.""" entity_id = "sensor.thermostat_evo_heating" @@ -231,7 +231,7 @@ async def test_hmip_thermostat_evo_heating( async def test_hmip_thermostat_evo_temperature( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureSensor.""" entity_id = "sensor.thermostat_evo_temperature" @@ -256,7 +256,9 @@ async def test_hmip_thermostat_evo_temperature( assert ha_state.attributes[ATTR_TEMPERATURE_OFFSET] == 0.7 -async def test_hmip_power_sensor(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_power_sensor( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipPowerSensor.""" entity_id = "sensor.flur_oben_power" entity_name = "Flur oben Power" @@ -294,7 +296,7 @@ async def test_hmip_power_sensor(hass: HomeAssistant, default_mock_hap_factory) async def test_hmip_illuminance_sensor1( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipIlluminanceSensor.""" entity_id = "sensor.wettersensor_illuminance" @@ -316,7 +318,7 @@ async def test_hmip_illuminance_sensor1( async def test_hmip_illuminance_sensor2( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipIlluminanceSensor.""" entity_id = "sensor.lichtsensor_nord_illuminance" @@ -341,7 +343,7 @@ async def test_hmip_illuminance_sensor2( async def test_hmip_windspeed_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test 
HomematicipWindspeedSensor.""" entity_id = "sensor.wettersensor_pro_windspeed" @@ -392,7 +394,7 @@ async def test_hmip_windspeed_sensor( async def test_hmip_today_rain_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTodayRainSensor.""" entity_id = "sensor.weather_sensor_plus_today_rain" @@ -414,7 +416,7 @@ async def test_hmip_today_rain_sensor( async def test_hmip_temperature_external_sensor_channel_1( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureDifferenceSensor Channel 1 HmIP-STE2-PCB.""" entity_id = "sensor.ste2_channel_1_temperature" @@ -439,7 +441,7 @@ async def test_hmip_temperature_external_sensor_channel_1( async def test_hmip_temperature_external_sensor_channel_2( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureDifferenceSensor Channel 2 HmIP-STE2-PCB.""" entity_id = "sensor.ste2_channel_2_temperature" @@ -464,7 +466,7 @@ async def test_hmip_temperature_external_sensor_channel_2( async def test_hmip_temperature_external_sensor_delta( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipTemperatureDifferenceSensor Delta HmIP-STE2-PCB.""" entity_id = "sensor.ste2_delta_temperature" @@ -491,7 +493,7 @@ async def test_hmip_temperature_external_sensor_delta( async def test_hmip_passage_detector_delta_counter( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipPassageDetectorDeltaCounter.""" entity_id = "sensor.spdr_1" @@ -514,7 +516,7 @@ async def test_hmip_passage_detector_delta_counter( async def test_hmip_esi_iec_current_power_consumption( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC currentPowerConsumption Sensor.""" entity_id = "sensor.esi_iec_currentPowerConsumption" @@ -532,7 +534,7 @@ async def test_hmip_esi_iec_current_power_consumption( async def test_hmip_esi_iec_energy_counter_usage_high_tariff( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC ENERGY_COUNTER_USAGE_HIGH_TARIFF.""" entity_id = "sensor.esi_iec_energy_counter_usage_high_tariff" @@ -550,7 +552,7 @@ async def test_hmip_esi_iec_energy_counter_usage_high_tariff( async def test_hmip_esi_iec_energy_counter_usage_low_tariff( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC ENERGY_COUNTER_USAGE_LOW_TARIFF.""" entity_id = "sensor.esi_iec_energy_counter_usage_low_tariff" @@ -568,7 +570,7 @@ async def test_hmip_esi_iec_energy_counter_usage_low_tariff( async def test_hmip_esi_iec_energy_counter_input_single_tariff( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC ENERGY_COUNTER_INPUT_SINGLE_TARIFF.""" entity_id = "sensor.esi_iec_energy_counter_input_single_tariff" @@ -586,7 +588,7 @@ async def test_hmip_esi_iec_energy_counter_input_single_tariff( async def test_hmip_esi_iec_unknown_channel( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> 
None: """Test devices are loaded partially.""" not_existing_entity_id = "sensor.esi_iec2_energy_counter_input_single_tariff" @@ -601,7 +603,7 @@ async def test_hmip_esi_iec_unknown_channel( async def test_hmip_esi_gas_current_gas_flow( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC CurrentGasFlow.""" entity_id = "sensor.esi_gas_currentgasflow" @@ -619,7 +621,7 @@ async def test_hmip_esi_gas_current_gas_flow( async def test_hmip_esi_gas_gas_volume( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC GasVolume.""" entity_id = "sensor.esi_gas_gasvolume" @@ -637,7 +639,7 @@ async def test_hmip_esi_gas_gas_volume( async def test_hmip_esi_led_current_power_consumption( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC currentPowerConsumption Sensor.""" entity_id = "sensor.esi_led_currentPowerConsumption" @@ -655,7 +657,7 @@ async def test_hmip_esi_led_current_power_consumption( async def test_hmip_esi_led_energy_counter_usage_high_tariff( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test ESI-IEC ENERGY_COUNTER_USAGE_HIGH_TARIFF.""" entity_id = "sensor.esi_led_energy_counter_usage_high_tariff" diff --git a/tests/components/homematicip_cloud/test_switch.py b/tests/components/homematicip_cloud/test_switch.py index a249c52393d..e4b51688ba7 100644 --- a/tests/components/homematicip_cloud/test_switch.py +++ b/tests/components/homematicip_cloud/test_switch.py @@ -9,7 +9,7 @@ from homeassistant.const import STATE_OFF, STATE_ON from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -20,7 +20,9 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: assert not hass.data.get(HMIPC_DOMAIN) -async def test_hmip_switch(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_switch( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipSwitch.""" entity_id = "switch.schrank" entity_name = "Schrank" @@ -57,7 +59,9 @@ async def test_hmip_switch(hass: HomeAssistant, default_mock_hap_factory) -> Non assert ha_state.state == STATE_ON -async def test_hmip_switch_input(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_switch_input( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipSwitch.""" entity_id = "switch.wohnzimmer_beleuchtung" entity_name = "Wohnzimmer Beleuchtung" @@ -95,7 +99,7 @@ async def test_hmip_switch_input(hass: HomeAssistant, default_mock_hap_factory) async def test_hmip_switch_measuring( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipSwitchMeasuring.""" entity_id = "switch.pc" @@ -134,7 +138,9 @@ async def test_hmip_switch_measuring( assert ha_state.state == STATE_ON -async def test_hmip_group_switch(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_group_switch( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory 
+) -> None: """Test HomematicipGroupSwitch.""" entity_id = "switch.strom_group" entity_name = "Strom Group" @@ -174,7 +180,9 @@ async def test_hmip_group_switch(hass: HomeAssistant, default_mock_hap_factory) assert ha_state.attributes[ATTR_GROUP_MEMBER_UNREACHABLE] -async def test_hmip_multi_switch(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_multi_switch( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipMultiSwitch.""" entity_id = "switch.jalousien_1_kizi_2_schlazi_channel1" entity_name = "Jalousien - 1 KiZi, 2 SchlaZi Channel1" @@ -228,7 +236,7 @@ async def test_hmip_multi_switch(hass: HomeAssistant, default_mock_hap_factory) async def test_hmip_wired_multi_switch( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipMultiSwitch.""" entity_id = "switch.fernseher_wohnzimmer" diff --git a/tests/components/homematicip_cloud/test_weather.py b/tests/components/homematicip_cloud/test_weather.py index 44005afd511..44df907fcc5 100644 --- a/tests/components/homematicip_cloud/test_weather.py +++ b/tests/components/homematicip_cloud/test_weather.py @@ -12,7 +12,7 @@ from homeassistant.const import ATTR_ATTRIBUTION from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -from .helper import async_manipulate_test_data, get_and_check_entity_basics +from .helper import HomeFactory, async_manipulate_test_data, get_and_check_entity_basics async def test_manually_configured_platform(hass: HomeAssistant) -> None: @@ -24,7 +24,7 @@ async def test_manually_configured_platform(hass: HomeAssistant) -> None: async def test_hmip_weather_sensor( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipWeatherSensor.""" entity_id = "weather.weather_sensor_plus" @@ -50,7 +50,7 @@ async def test_hmip_weather_sensor( async def test_hmip_weather_sensor_pro( - hass: HomeAssistant, default_mock_hap_factory + hass: HomeAssistant, default_mock_hap_factory: HomeFactory ) -> None: """Test HomematicipWeatherSensorPro.""" entity_id = "weather.wettersensor_pro" @@ -76,7 +76,9 @@ async def test_hmip_weather_sensor_pro( assert ha_state.attributes[ATTR_WEATHER_TEMPERATURE] == 12.1 -async def test_hmip_home_weather(hass: HomeAssistant, default_mock_hap_factory) -> None: +async def test_hmip_home_weather( + hass: HomeAssistant, default_mock_hap_factory: HomeFactory +) -> None: """Test HomematicipHomeWeather.""" entity_id = "weather.weather_1010_wien_osterreich" entity_name = "Weather 1010 Wien, Österreich" diff --git a/tests/components/homewizard/snapshots/test_sensor.ambr b/tests/components/homewizard/snapshots/test_sensor.ambr index 63ee9312a13..dd50b098d40 100644 --- a/tests/components/homewizard/snapshots/test_sensor.ambr +++ b/tests/components/homewizard/snapshots/test_sensor.ambr @@ -712,7 +712,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH1-entity_ids7][sensor.device_reactive_power:state] @@ -721,7 +721,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -2632,7 +2632,7 @@ 'supported_features': 0, 'translation_key': 
None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power:state] @@ -2641,7 +2641,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -2719,7 +2719,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power_phase_1:state] @@ -2728,7 +2728,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 1', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_1', @@ -2806,7 +2806,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power_phase_2:state] @@ -2815,7 +2815,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 2', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_2', @@ -2893,7 +2893,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-KWH3-entity_ids8][sensor.device_reactive_power_phase_3:state] @@ -2902,7 +2902,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 3', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_3', @@ -14905,7 +14905,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[HWE-SKT-21-entity_ids3][sensor.device_reactive_power:state] @@ -14914,7 +14914,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -16200,7 +16200,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[SDM230-entity_ids5][sensor.device_reactive_power:state] @@ -16209,7 +16209,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -18120,7 +18120,7 @@ 'supported_features': 0, 'translation_key': None, 'unique_id': 'aabbccddeeff_active_reactive_power_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power:state] @@ -18129,7 +18129,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power', 
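# Editor's note on the homewizard snapshot hunks above and below: the new value
# between the angle brackets was stripped from this copy of the diff, so only
# the removed plain string 'var' is visible. Presumably the reactive-power
# sensors now report the unit enum from homeassistant.const instead of a bare
# string; a hedged sketch of why the snapshots change while the displayed unit
# does not:
from homeassistant.const import UnitOfReactivePower

# The enum member serializes to the same string, so entity states still show
# "var"; only the snapshot's repr of unit_of_measurement changes.
assert UnitOfReactivePower.VOLT_AMPERE_REACTIVE == "var"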
'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power', @@ -18207,7 +18207,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l1_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power_phase_1:state] @@ -18216,7 +18216,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 1', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_1', @@ -18294,7 +18294,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l2_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power_phase_2:state] @@ -18303,7 +18303,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 2', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_2', @@ -18381,7 +18381,7 @@ 'supported_features': 0, 'translation_key': 'active_reactive_power_phase_var', 'unique_id': 'aabbccddeeff_active_reactive_power_l3_var', - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }) # --- # name: test_sensors[SDM630-entity_ids6][sensor.device_reactive_power_phase_3:state] @@ -18390,7 +18390,7 @@ 'device_class': 'reactive_power', 'friendly_name': 'Device Reactive power phase 3', 'state_class': , - 'unit_of_measurement': 'var', + 'unit_of_measurement': , }), 'context': , 'entity_id': 'sensor.device_reactive_power_phase_3', diff --git a/tests/components/homewizard/test_config_flow.py b/tests/components/homewizard/test_config_flow.py index 8d12a8a1787..442659f2aad 100644 --- a/tests/components/homewizard/test_config_flow.py +++ b/tests/components/homewizard/test_config_flow.py @@ -341,13 +341,7 @@ async def test_reauth_flow( """Test reauth flow while API is enabled.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -367,13 +361,7 @@ async def test_reauth_error( mock_homewizardenergy.device.side_effect = DisabledError mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/honeywell/test_config_flow.py b/tests/components/honeywell/test_config_flow.py index 7cd987f0d83..ed9c86f5e10 100644 --- a/tests/components/honeywell/test_config_flow.py +++ b/tests/components/honeywell/test_config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components.honeywell.const import ( CONF_HEAT_AWAY_TEMPERATURE, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, ConfigEntryState +from homeassistant.config_entries import SOURCE_USER, ConfigEntryState from 
homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -129,21 +129,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: unique_id="test-username", ) mock_entry.add_to_hass(hass) - with patch( - "homeassistant.components.honeywell.async_setup_entry", - return_value=True, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, - ) - - await hass.async_block_till_done() + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -177,16 +163,7 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, client: MagicMock) -> ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, - ) - await hass.async_block_till_done() + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -226,17 +203,7 @@ async def test_reauth_flow_connnection_error( unique_id="test-username", ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={CONF_USERNAME: "test-username", CONF_PASSWORD: "new-password"}, - ) - await hass.async_block_till_done() - + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/html5/test_notify.py b/tests/components/html5/test_notify.py index f54ec9fa8f7..42ca6067418 100644 --- a/tests/components/html5/test_notify.py +++ b/tests/components/html5/test_notify.py @@ -2,9 +2,11 @@ from http import HTTPStatus import json +from typing import Any from unittest.mock import mock_open, patch from aiohttp.hdrs import AUTHORIZATION +from aiohttp.test_utils import TestClient import homeassistant.components.html5.notify as html5 from homeassistant.core import HomeAssistant @@ -69,7 +71,11 @@ REGISTER_URL = "/api/notify.html5" PUBLISH_URL = "/api/notify.html5/callback" -async def mock_client(hass, hass_client, registrations=None): +async def mock_client( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + registrations: dict[str, Any] | None = None, +) -> TestClient: """Create a test client for HTML5 views.""" if registrations is None: registrations = {} diff --git a/tests/components/http/test_auth.py b/tests/components/http/test_auth.py index 7f29f8a4b9f..76c512c9686 100644 --- a/tests/components/http/test_auth.py +++ b/tests/components/http/test_auth.py @@ -4,6 +4,7 @@ from datetime import timedelta from http import HTTPStatus from ipaddress import ip_network import logging +from typing import Any from unittest.mock import Mock, patch from aiohttp import BasicAuth, web @@ -476,7 +477,11 @@ async def test_auth_access_signed_path_via_websocket( @websocket_api.websocket_command({"type": "diagnostics/list"}) @callback - def get_signed_path(hass, connection, msg): + def get_signed_path( + 
hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: connection.send_result( msg["id"], {"path": async_sign_path(hass, "/", timedelta(seconds=5))} ) diff --git a/tests/components/http/test_cors.py b/tests/components/http/test_cors.py index 1188131cc0f..c0256abb25d 100644 --- a/tests/components/http/test_cors.py +++ b/tests/components/http/test_cors.py @@ -119,7 +119,7 @@ async def test_cors_middleware_with_cors_allowed_view(hass: HomeAssistant) -> No requires_auth = False cors_allowed = True - def __init__(self, url, name): + def __init__(self, url, name) -> None: """Initialize test view.""" self.url = url self.name = name diff --git a/tests/components/huawei_lte/test_config_flow.py b/tests/components/huawei_lte/test_config_flow.py index 862af02963c..a9a147eb17e 100644 --- a/tests/components/huawei_lte/test_config_flow.py +++ b/tests/components/huawei_lte/test_config_flow.py @@ -385,15 +385,7 @@ async def test_reauth( ) entry.add_to_hass(hass) - context = { - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - } - result = await hass.config_entries.flow.async_init( - DOMAIN, context=context, data=entry.data - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["data_schema"] is not None diff --git a/tests/components/husqvarna_automower/fixtures/mower.json b/tests/components/husqvarna_automower/fixtures/mower.json index a5cae68f47c..aa8ea2cbef4 100644 --- a/tests/components/husqvarna_automower/fixtures/mower.json +++ b/tests/components/husqvarna_automower/fixtures/mower.json @@ -13,6 +13,7 @@ "batteryPercent": 100 }, "capabilities": { + "canConfirmError": true, "headlights": true, "workAreas": true, "position": true, diff --git a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr index 212be85ce51..3838f2eb960 100644 --- a/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_diagnostics.ambr @@ -9,6 +9,7 @@ dict({ 'end': '2024-03-02T00:00:00', 'rrule': 'FREQ=WEEKLY;BYDAY=MO,WE,FR', + 'schedule_no': 1, 'start': '2024-03-01T19:00:00', 'uid': '1140_300_MO,WE,FR', 'work_area_id': None, @@ -17,6 +18,7 @@ dict({ 'end': '2024-03-02T08:00:00', 'rrule': 'FREQ=WEEKLY;BYDAY=TU,TH,SA', + 'schedule_no': 2, 'start': '2024-03-02T00:00:00', 'uid': '0_480_TU,TH,SA', 'work_area_id': None, @@ -53,6 +55,7 @@ ]), }), 'capabilities': dict({ + 'can_confirm_error': True, 'headlights': True, 'position': True, 'stay_out_zones': True, diff --git a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr index c727a49b71a..c260e6beba6 100644 --- a/tests/components/husqvarna_automower/snapshots/test_sensor.ambr +++ b/tests/components/husqvarna_automower/snapshots/test_sensor.ambr @@ -551,64 +551,6 @@ 'state': '2023-06-05T17:00:00+00:00', }) # --- -# name: test_sensor_snapshot[sensor.test_mower_1_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'options': list([ - 'Front lawn', - 'Back lawn', - 'my_lawn', - ]), - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.test_mower_1_none', - 'has_entity_name': True, - 
'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'husqvarna_automower', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'work_area', - 'unique_id': 'c7233734-b219-4287-a173-08e3643f89f0_work_area', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor_snapshot[sensor.test_mower_1_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Test Mower 1 None', - 'options': list([ - 'Front lawn', - 'Back lawn', - 'my_lawn', - ]), - }), - 'context': , - 'entity_id': 'sensor.test_mower_1_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'Front lawn', - }) -# --- # name: test_sensor_snapshot[sensor.test_mower_1_number_of_charging_cycles-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/husqvarna_automower/test_button.py b/tests/components/husqvarna_automower/test_button.py index 6cc465df74b..5cbb9b893a8 100644 --- a/tests/components/husqvarna_automower/test_button.py +++ b/tests/components/husqvarna_automower/test_button.py @@ -34,7 +34,6 @@ from tests.common import ( @pytest.mark.freeze_time(datetime.datetime(2024, 2, 29, 11, tzinfo=datetime.UTC)) -@pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_button_states_and_commands( hass: HomeAssistant, mock_automower_client: AsyncMock, diff --git a/tests/components/hyperion/__init__.py b/tests/components/hyperion/__init__.py index 72aba96e81f..36137ce0ddd 100644 --- a/tests/components/hyperion/__init__.py +++ b/tests/components/hyperion/__init__.py @@ -124,9 +124,9 @@ def add_test_config_entry( hass: HomeAssistant, data: dict[str, Any] | None = None, options: dict[str, Any] | None = None, -) -> ConfigEntry: +) -> MockConfigEntry: """Add a test config entry.""" - config_entry: MockConfigEntry = MockConfigEntry( + config_entry = MockConfigEntry( entry_id=TEST_CONFIG_ENTRY_ID, domain=DOMAIN, data=data diff --git a/tests/components/hyperion/test_config_flow.py b/tests/components/hyperion/test_config_flow.py index 57749f5eedc..4109fe0f653 100644 --- a/tests/components/hyperion/test_config_flow.py +++ b/tests/components/hyperion/test_config_flow.py @@ -20,7 +20,7 @@ from homeassistant.components.hyperion.const import ( DOMAIN, ) from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER from homeassistant.const import ( ATTR_ENTITY_ID, CONF_HOST, @@ -427,7 +427,7 @@ async def test_auth_create_token_approval_declined_task_canceled( class CanceledAwaitableMock(AsyncMock): """A canceled awaitable mock.""" - def __init__(self): + def __init__(self) -> None: super().__init__() self.done = Mock(return_value=False) self.cancel = Mock() @@ -861,12 +861,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: ), patch("homeassistant.components.hyperion.async_setup_entry", return_value=True), ): - result = await _init_flow( - hass, - source=SOURCE_REAUTH, - data=config_data, - ) - await hass.async_block_till_done() + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM result = await _configure_flow( @@ -886,18 +881,13 @@ async def test_reauth_cannot_connect(hass: HomeAssistant) -> None: CONF_PORT: TEST_PORT, } - 
add_test_config_entry(hass, data=config_data) + config_entry = add_test_config_entry(hass, data=config_data) client = create_mock_client() client.async_client_connect = AsyncMock(return_value=False) with patch( "homeassistant.components.hyperion.client.HyperionClient", return_value=client ): - result = await _init_flow( - hass, - source=SOURCE_REAUTH, - data=config_data, - ) - await hass.async_block_till_done() + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "cannot_connect" diff --git a/tests/components/iaqualink/test_init.py b/tests/components/iaqualink/test_init.py index 8e157b8d1e3..1df199f706a 100644 --- a/tests/components/iaqualink/test_init.py +++ b/tests/components/iaqualink/test_init.py @@ -30,7 +30,7 @@ from .conftest import get_aqualink_device, get_aqualink_system from tests.common import async_fire_time_changed -async def _ffwd_next_update_interval(hass): +async def _ffwd_next_update_interval(hass: HomeAssistant) -> None: now = dt_util.utcnow() async_fire_time_changed(hass, now + UPDATE_INTERVAL) await hass.async_block_till_done() diff --git a/tests/components/icloud/test_config_flow.py b/tests/components/icloud/test_config_flow.py index ec8d11f1135..c0bc5d7ed2e 100644 --- a/tests/components/icloud/test_config_flow.py +++ b/tests/components/icloud/test_config_flow.py @@ -18,7 +18,7 @@ from homeassistant.components.icloud.const import ( DEFAULT_WITH_FAMILY, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -386,12 +386,7 @@ async def test_password_update( ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, - data={**MOCK_CONFIG}, - ) - + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM result = await hass.config_entries.flow.async_configure( @@ -410,12 +405,7 @@ async def test_password_update_wrong_password(hass: HomeAssistant) -> None: ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "unique_id": config_entry.unique_id}, - data={**MOCK_CONFIG}, - ) - + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM with patch( diff --git a/tests/components/image_processing/common.py b/tests/components/image_processing/common.py index 4b3a008c6cd..35b94f2c91c 100644 --- a/tests/components/image_processing/common.py +++ b/tests/components/image_processing/common.py @@ -6,19 +6,19 @@ components. Instead call the service directly. 
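# Editor's sketch (not part of the patch): the module docstring above
# recommends calling the service directly instead of these legacy helpers.
# Using the same DOMAIN and SERVICE_SCAN constants this module imports, a
# direct call looks roughly like this (the entity id is a placeholder):
from homeassistant.components.image_processing import DOMAIN, SERVICE_SCAN
from homeassistant.core import HomeAssistant


async def trigger_scan(hass: HomeAssistant, entity_id: str) -> None:
    """Ask one image_processing entity to process its camera image now."""
    await hass.services.async_call(
        DOMAIN, SERVICE_SCAN, {"entity_id": entity_id}, blocking=True
    )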
from homeassistant.components.image_processing import DOMAIN, SERVICE_SCAN from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL -from homeassistant.core import callback +from homeassistant.core import HomeAssistant, callback from homeassistant.loader import bind_hass @bind_hass -def scan(hass, entity_id=ENTITY_MATCH_ALL): +def scan(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Force process of all cameras or given entity.""" hass.add_job(async_scan, hass, entity_id) @callback @bind_hass -def async_scan(hass, entity_id=ENTITY_MATCH_ALL): +def async_scan(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Force process of all cameras or given entity.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_SCAN, data)) diff --git a/tests/components/image_processing/test_init.py b/tests/components/image_processing/test_init.py index 577d3fc47db..3e7c8f2fb91 100644 --- a/tests/components/image_processing/test_init.py +++ b/tests/components/image_processing/test_init.py @@ -35,13 +35,15 @@ def aiohttp_unused_port_factory( return unused_tcp_port_factory -def get_url(hass): +def get_url(hass: HomeAssistant) -> str: """Return camera url.""" state = hass.states.get("camera.demo_camera") return f"{hass.config.internal_url}{state.attributes.get(ATTR_ENTITY_PICTURE)}" -async def setup_image_processing(hass, aiohttp_unused_port_factory): +async def setup_image_processing( + hass: HomeAssistant, aiohttp_unused_port_factory: Callable[[], int] +) -> None: """Set up things to be run when tests are started.""" await async_setup_component( hass, @@ -55,7 +57,7 @@ async def setup_image_processing(hass, aiohttp_unused_port_factory): await hass.async_block_till_done() -async def setup_image_processing_face(hass): +async def setup_image_processing_face(hass: HomeAssistant) -> None: """Set up things to be run when tests are started.""" config = {ip.DOMAIN: {"platform": "demo"}, "camera": {"platform": "demo"}} @@ -93,7 +95,7 @@ async def test_setup_component_with_service(hass: HomeAssistant) -> None: async def test_get_image_from_camera( mock_camera_read, hass: HomeAssistant, - aiohttp_unused_port_factory, + aiohttp_unused_port_factory: Callable[[], int], ) -> None: """Grab an image from camera entity.""" await setup_image_processing(hass, aiohttp_unused_port_factory) @@ -116,7 +118,7 @@ async def test_get_image_from_camera( async def test_get_image_without_exists_camera( mock_image, hass: HomeAssistant, - aiohttp_unused_port_factory, + aiohttp_unused_port_factory: Callable[[], int], ) -> None: """Try to get image without exists camera.""" await setup_image_processing(hass, aiohttp_unused_port_factory) @@ -191,7 +193,7 @@ async def test_face_event_call_no_confidence( @pytest.mark.usefixtures("enable_custom_integrations") async def test_update_missing_camera( hass: HomeAssistant, - aiohttp_unused_port_factory, + aiohttp_unused_port_factory: Callable[[], int], caplog: pytest.LogCaptureFixture, ) -> None: """Test when entity does not set camera.""" diff --git a/tests/components/imap/test_config_flow.py b/tests/components/imap/test_config_flow.py index 459cecec4a6..fb97bf0505d 100644 --- a/tests/components/imap/test_config_flow.py +++ b/tests/components/imap/test_config_flow.py @@ -215,15 +215,7 @@ async def test_reauth_success(hass: HomeAssistant, mock_setup_entry: AsyncMock) ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": 
config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["description_placeholders"] == {CONF_USERNAME: "email@email.com"} @@ -256,15 +248,7 @@ async def test_reauth_failed(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -294,15 +278,7 @@ async def test_reauth_failed_conn_error(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) - + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr b/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr index f314a4be590..c5ae6880022 100644 --- a/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr +++ b/tests/components/imgw_pib/snapshots/test_binary_sensor.ambr @@ -95,101 +95,3 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[binary_sensor.station_name_flood_alarm-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.station_name_flood_alarm', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Flood alarm', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flood_alarm', - 'unique_id': '123_flood_alarm', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[binary_sensor.station_name_flood_alarm-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'alarm_level': 630.0, - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'safety', - 'friendly_name': 'Station Name Flood alarm', - }), - 'context': , - 'entity_id': 'binary_sensor.station_name_flood_alarm', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[binary_sensor.station_name_flood_warning-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.station_name_flood_warning', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Flood warning', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'flood_warning', - 'unique_id': '123_flood_warning', - 'unit_of_measurement': None, - }) -# 
--- -# name: test_binary_sensor[binary_sensor.station_name_flood_warning-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'safety', - 'friendly_name': 'Station Name Flood warning', - 'warning_level': 590.0, - }), - 'context': , - 'entity_id': 'binary_sensor.station_name_flood_warning', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/imgw_pib/snapshots/test_sensor.ambr b/tests/components/imgw_pib/snapshots/test_sensor.ambr index 2638e468d92..6c69b890842 100644 --- a/tests/components/imgw_pib/snapshots/test_sensor.ambr +++ b/tests/components/imgw_pib/snapshots/test_sensor.ambr @@ -213,113 +213,3 @@ 'state': '10.8', }) # --- -# name: test_sensor[sensor.station_name_water_level-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.station_name_water_level', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 0, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water level', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_level', - 'unique_id': '123_water_level', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.station_name_water_level-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'distance', - 'friendly_name': 'Station Name Water level', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.station_name_water_level', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '526.0', - }) -# --- -# name: test_sensor[sensor.station_name_water_temperature-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.station_name_water_temperature', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Water temperature', - 'platform': 'imgw_pib', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'water_temperature', - 'unique_id': '123_water_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_sensor[sensor.station_name_water_temperature-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by IMGW-PIB', - 'device_class': 'temperature', - 'friendly_name': 'Station Name Water temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.station_name_water_temperature', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.8', - }) -# --- diff --git a/tests/components/influxdb/test_init.py b/tests/components/influxdb/test_init.py index e9592a06fe2..f900be7b700 100644 --- 
a/tests/components/influxdb/test_init.py +++ b/tests/components/influxdb/test_init.py @@ -334,7 +334,9 @@ async def test_invalid_config( assert not await async_setup_component(hass, influxdb.DOMAIN, config) -async def _setup(hass, mock_influx_client, config_ext, get_write_api): +async def _setup( + hass: HomeAssistant, mock_influx_client, config_ext, get_write_api +) -> None: """Prepare client for next test and return event handler method.""" config = { "influxdb": { diff --git a/tests/components/influxdb/test_sensor.py b/tests/components/influxdb/test_sensor.py index 73dd8375a00..7f5954728a6 100644 --- a/tests/components/influxdb/test_sensor.py +++ b/tests/components/influxdb/test_sensor.py @@ -25,7 +25,7 @@ from homeassistant.components.influxdb.const import ( ) from homeassistant.components.influxdb.sensor import PLATFORM_SCHEMA from homeassistant.const import STATE_UNKNOWN -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.helpers.entity_platform import PLATFORM_NOT_READY_BASE_WAIT_TIME from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util @@ -190,7 +190,9 @@ def _set_query_mock_v2( return query_api -async def _setup(hass, config_ext, queries, expected_sensors): +async def _setup( + hass: HomeAssistant, config_ext, queries, expected_sensors +) -> list[State]: """Create client and test expected sensors.""" config = { DOMAIN: config_ext, diff --git a/tests/components/input_datetime/test_init.py b/tests/components/input_datetime/test_init.py index fdbb9a7803f..411f084d39a 100644 --- a/tests/components/input_datetime/test_init.py +++ b/tests/components/input_datetime/test_init.py @@ -79,7 +79,9 @@ def storage_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage -async def async_set_date_and_time(hass, entity_id, dt_value): +async def async_set_date_and_time( + hass: HomeAssistant, entity_id: str, dt_value: datetime.datetime +) -> None: """Set date and / or time of input_datetime.""" await hass.services.async_call( DOMAIN, @@ -93,7 +95,9 @@ async def async_set_date_and_time(hass, entity_id, dt_value): ) -async def async_set_datetime(hass, entity_id, dt_value): +async def async_set_datetime( + hass: HomeAssistant, entity_id: str, dt_value: datetime.datetime +) -> None: """Set date and / or time of input_datetime.""" await hass.services.async_call( DOMAIN, @@ -103,7 +107,9 @@ async def async_set_datetime(hass, entity_id, dt_value): ) -async def async_set_timestamp(hass, entity_id, timestamp): +async def async_set_timestamp( + hass: HomeAssistant, entity_id: str, timestamp: float +) -> None: """Set date and / or time of input_datetime.""" await hass.services.async_call( DOMAIN, diff --git a/tests/components/input_number/test_init.py b/tests/components/input_number/test_init.py index 73e41f347ce..8ea1c2e25b6 100644 --- a/tests/components/input_number/test_init.py +++ b/tests/components/input_number/test_init.py @@ -65,7 +65,7 @@ def storage_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage -async def set_value(hass, entity_id, value): +async def set_value(hass: HomeAssistant, entity_id: str, value: str) -> None: """Set input_number to value. This is a legacy helper method. Do not use it for new tests. @@ -78,7 +78,7 @@ async def set_value(hass, entity_id, value): ) -async def increment(hass, entity_id): +async def increment(hass: HomeAssistant, entity_id: str) -> None: """Increment value of entity. This is a legacy helper method. 
Do not use it for new tests. @@ -88,7 +88,7 @@ async def increment(hass, entity_id): ) -async def decrement(hass, entity_id): +async def decrement(hass: HomeAssistant, entity_id: str) -> None: """Decrement value of entity. This is a legacy helper method. Do not use it for new tests. diff --git a/tests/components/input_text/test_init.py b/tests/components/input_text/test_init.py index 3cae98b6dfe..2ca1d39a983 100644 --- a/tests/components/input_text/test_init.py +++ b/tests/components/input_text/test_init.py @@ -71,7 +71,7 @@ def storage_setup(hass: HomeAssistant, hass_storage: dict[str, Any]): return _storage -async def async_set_value(hass, entity_id, value): +async def async_set_value(hass: HomeAssistant, entity_id: str, value: str) -> None: """Set input_text to value.""" await hass.services.async_call( DOMAIN, diff --git a/tests/components/insteon/const.py b/tests/components/insteon/const.py index c35db3b7092..a4e4e8a390d 100644 --- a/tests/components/insteon/const.py +++ b/tests/components/insteon/const.py @@ -79,5 +79,4 @@ PATCH_CONNECTION = "homeassistant.components.insteon.config_flow.async_connect" PATCH_CONNECTION_CLOSE = "homeassistant.components.insteon.config_flow.async_close" PATCH_DEVICES = "homeassistant.components.insteon.config_flow.devices" PATCH_USB_LIST = "homeassistant.components.insteon.config_flow.async_get_usb_ports" -PATCH_ASYNC_SETUP = "homeassistant.components.insteon.async_setup" PATCH_ASYNC_SETUP_ENTRY = "homeassistant.components.insteon.async_setup_entry" diff --git a/tests/components/insteon/mock_devices.py b/tests/components/insteon/mock_devices.py index 6b5f5cf5e09..2c385c337fd 100644 --- a/tests/components/insteon/mock_devices.py +++ b/tests/components/insteon/mock_devices.py @@ -30,7 +30,7 @@ class MockSwitchLinc(SwitchedLightingControl_SwitchLinc02): class MockDevices: """Mock devices class.""" - def __init__(self, connected=True): + def __init__(self, connected=True) -> None: """Init the MockDevices class.""" self._devices = {} self.modem = None diff --git a/tests/components/insteon/test_api_aldb.py b/tests/components/insteon/test_api_aldb.py index 4376628d9a4..9f3c78b4b39 100644 --- a/tests/components/insteon/test_api_aldb.py +++ b/tests/components/insteon/test_api_aldb.py @@ -1,6 +1,7 @@ """Test the Insteon All-Link Database APIs.""" import json +from typing import Any from unittest.mock import patch from pyinsteon import pub @@ -23,7 +24,7 @@ from homeassistant.core import HomeAssistant from .mock_devices import MockDevices from tests.common import load_fixture -from tests.typing import WebSocketGenerator +from tests.typing import MockHAClientWebSocket, WebSocketGenerator @pytest.fixture(name="aldb_data", scope="module") @@ -32,7 +33,9 @@ def aldb_data_fixture(): return json.loads(load_fixture("insteon/aldb_data.json")) -async def _setup(hass, hass_ws_client, aldb_data): +async def _setup( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, aldb_data: dict[str, Any] +) -> tuple[MockHAClientWebSocket, MockDevices]: """Set up tests.""" ws_client = await hass_ws_client(hass) devices = MockDevices() diff --git a/tests/components/insteon/test_api_properties.py b/tests/components/insteon/test_api_properties.py index aee35cb8994..35ff95a5cc8 100644 --- a/tests/components/insteon/test_api_properties.py +++ b/tests/components/insteon/test_api_properties.py @@ -1,6 +1,7 @@ """Test the Insteon properties APIs.""" import json +from typing import Any from unittest.mock import AsyncMock, patch from pyinsteon.config import MOMENTARY_DELAY, RELAY_MODE, 
TOGGLE_BUTTON @@ -26,7 +27,7 @@ from homeassistant.core import HomeAssistant from .mock_devices import MockDevices from tests.common import load_fixture -from tests.typing import WebSocketGenerator +from tests.typing import MockHAClientWebSocket, WebSocketGenerator @pytest.fixture(name="kpl_properties_data", scope="module") @@ -41,7 +42,12 @@ def iolinc_properties_data_fixture(): return json.loads(load_fixture("insteon/iolinc_properties.json")) -async def _setup(hass, hass_ws_client, address, properties_data): +async def _setup( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + address: str, + properties_data: dict[str, Any], +) -> tuple[MockHAClientWebSocket, MockDevices]: """Set up tests.""" ws_client = await hass_ws_client(hass) devices = MockDevices() diff --git a/tests/components/insteon/test_config_flow.py b/tests/components/insteon/test_config_flow.py index 4d3fb815463..31d38a603f1 100644 --- a/tests/components/insteon/test_config_flow.py +++ b/tests/components/insteon/test_config_flow.py @@ -1,6 +1,8 @@ """Test the config flow for the Insteon integration.""" -from unittest.mock import patch +from collections.abc import Callable +from typing import Any +from unittest.mock import AsyncMock, patch import pytest from voluptuous_serialize import convert @@ -14,7 +16,7 @@ from homeassistant.components.insteon.config_flow import ( STEP_PLM_MANUALLY, ) from homeassistant.components.insteon.const import CONF_HUB_VERSION, DOMAIN -from homeassistant.config_entries import ConfigEntryState +from homeassistant.config_entries import ConfigEntryState, ConfigFlowResult from homeassistant.const import CONF_DEVICE, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -25,7 +27,6 @@ from .const import ( MOCK_USER_INPUT_HUB_V2, MOCK_USER_INPUT_PLM, MOCK_USER_INPUT_PLM_MANUAL, - PATCH_ASYNC_SETUP, PATCH_ASYNC_SETUP_ENTRY, PATCH_CONNECTION, PATCH_USB_LIST, @@ -61,7 +62,7 @@ async def mock_failed_connection(*args, **kwargs): raise ConnectionError("Connection failed") -async def _init_form(hass, modem_type): +async def _init_form(hass: HomeAssistant, modem_type: str) -> ConfigFlowResult: """Run the user form.""" result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -74,14 +75,18 @@ async def _init_form(hass, modem_type): ) -async def _device_form(hass, flow_id, connection, user_input): +async def _device_form( + hass: HomeAssistant, + flow_id: str, + connection: Callable[..., Any], + user_input: dict[str, Any] | None, +) -> tuple[ConfigFlowResult, AsyncMock]: """Test the PLM, Hub v1 or Hub v2 form.""" with ( patch( PATCH_CONNECTION, new=connection, ), - patch(PATCH_ASYNC_SETUP, return_value=True) as mock_setup, patch( PATCH_ASYNC_SETUP_ENTRY, return_value=True, @@ -89,7 +94,7 @@ async def _device_form(hass, flow_id, connection, user_input): ): result = await hass.config_entries.flow.async_configure(flow_id, user_input) await hass.async_block_till_done() - return result, mock_setup, mock_setup_entry + return result, mock_setup_entry async def test_form_select_modem(hass: HomeAssistant) -> None: @@ -125,13 +130,12 @@ async def test_form_select_plm(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.CREATE_ENTRY assert result2["data"] == 
MOCK_USER_INPUT_PLM - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -142,7 +146,7 @@ async def test_form_select_plm_no_usb(hass: HomeAssistant) -> None: USB_PORTS.clear() result = await _init_form(hass, STEP_PLM) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_successful_connection, None ) USB_PORTS.update(temp_usb_list) @@ -155,18 +159,17 @@ async def test_form_select_plm_manual(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM_MANUAL ) - result3, mock_setup, mock_setup_entry = await _device_form( + result3, mock_setup_entry = await _device_form( hass, result2["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.FORM assert result3["type"] is FlowResultType.CREATE_ENTRY assert result3["data"] == MOCK_USER_INPUT_PLM - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -175,7 +178,7 @@ async def test_form_select_hub_v1(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V1) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_HUB_V1 ) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -184,7 +187,6 @@ async def test_form_select_hub_v1(hass: HomeAssistant) -> None: CONF_HUB_VERSION: 1, } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -193,7 +195,7 @@ async def test_form_select_hub_v2(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V2) - result2, mock_setup, mock_setup_entry = await _device_form( + result2, mock_setup_entry = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_HUB_V2 ) assert result2["type"] is FlowResultType.CREATE_ENTRY @@ -202,7 +204,6 @@ async def test_form_select_hub_v2(hass: HomeAssistant) -> None: CONF_HUB_VERSION: 2, } - assert len(mock_setup.mock_calls) == 1 assert len(mock_setup_entry.mock_calls) == 1 @@ -233,7 +234,7 @@ async def test_failed_connection_plm(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM ) assert result2["type"] is FlowResultType.FORM @@ -245,10 +246,10 @@ async def test_failed_connection_plm_manually(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_PLM) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_successful_connection, MOCK_USER_INPUT_PLM_MANUAL ) - result3, _, _ = await _device_form( + result3, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_PLM ) assert result3["type"] is FlowResultType.FORM @@ -260,7 +261,7 @@ async def test_failed_connection_hub(hass: HomeAssistant) -> None: result = await _init_form(hass, STEP_HUB_V2) - result2, _, _ = await _device_form( + result2, _ = await _device_form( hass, result["flow_id"], mock_failed_connection, MOCK_USER_INPUT_HUB_V2 ) assert result2["type"] is FlowResultType.FORM @@ -284,7 +285,7 @@ async def test_discovery_via_usb(hass: HomeAssistant) -> None: assert result["type"] is FlowResultType.FORM 
assert result["step_id"] == "confirm_usb" - with patch(PATCH_CONNECTION), patch(PATCH_ASYNC_SETUP, return_value=True): + with patch(PATCH_CONNECTION): result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={} ) diff --git a/tests/components/intellifire/__init__.py b/tests/components/intellifire/__init__.py index f655ccc2fa4..50497939f7f 100644 --- a/tests/components/intellifire/__init__.py +++ b/tests/components/intellifire/__init__.py @@ -1 +1,13 @@ """Tests for the IntelliFire integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/intellifire/conftest.py b/tests/components/intellifire/conftest.py index cf1e085c10f..251d5bdde48 100644 --- a/tests/components/intellifire/conftest.py +++ b/tests/components/intellifire/conftest.py @@ -1,14 +1,40 @@ """Fixtures for IntelliFire integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, Mock, patch +from unittest.mock import AsyncMock, MagicMock, Mock, PropertyMock, patch -from aiohttp.client_reqrep import ConnectionKey +from intellifire4py.const import IntelliFireApiMode +from intellifire4py.model import ( + IntelliFireCommonFireplaceData, + IntelliFirePollData, + IntelliFireUserData, +) import pytest +from homeassistant.components.intellifire.const import ( + API_MODE_CLOUD, + API_MODE_LOCAL, + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_USER_ID, + CONF_WEB_CLIENT_ID, + DOMAIN, +) +from homeassistant.const import ( + CONF_API_KEY, + CONF_HOST, + CONF_IP_ADDRESS, + CONF_PASSWORD, + CONF_USERNAME, +) + +from tests.common import MockConfigEntry, load_json_object_fixture + @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock]: +def mock_setup_entry() -> Generator[AsyncMock, None, None]: """Mock setting up a config entry.""" with patch( "homeassistant.components.intellifire.async_setup_entry", return_value=True @@ -17,44 +43,206 @@ def mock_setup_entry() -> Generator[AsyncMock]: @pytest.fixture -def mock_fireplace_finder_none() -> Generator[MagicMock]: +def mock_fireplace_finder_none() -> Generator[None, MagicMock, None]: """Mock fireplace finder.""" mock_found_fireplaces = Mock() mock_found_fireplaces.ips = [] with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace" + "homeassistant.components.intellifire.config_flow.UDPFireplaceFinder.search_fireplace" ): yield mock_found_fireplaces @pytest.fixture -def mock_fireplace_finder_single() -> Generator[MagicMock]: - """Mock fireplace finder.""" - mock_found_fireplaces = Mock() - mock_found_fireplaces.ips = ["192.168.1.69"] - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace" - ): - yield mock_found_fireplaces +def mock_config_entry_current() -> MockConfigEntry: + """Return a mock config entry.""" + return MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=2, + data={ + CONF_IP_ADDRESS: "192.168.2.108", + CONF_USERNAME: "grumpypanda@china.cn", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_SERIAL: "3FB284769E4736F30C8973A7ED358123", + CONF_WEB_CLIENT_ID: "FA2B1C3045601234D0AE17D72F8E975", + CONF_API_KEY: 
"B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + CONF_AUTH_COOKIE: "B984F21A6378560019F8A1CDE41B6782", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + options={CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_CLOUD}, + unique_id="3FB284769E4736F30C8973A7ED358123", + ) @pytest.fixture -def mock_intellifire_config_flow() -> Generator[MagicMock]: - """Return a mocked IntelliFire client.""" - data_mock = Mock() - data_mock.serial = "12345" +def mock_config_entry_old() -> MockConfigEntry: + """For migration testing.""" + return MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=1, + title="Fireplace 3FB284769E4736F30C8973A7ED358123", + data={ + CONF_HOST: "192.168.2.108", + CONF_USERNAME: "grumpypanda@china.cn", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + ) + +@pytest.fixture +def mock_common_data_local() -> IntelliFireCommonFireplaceData: + """Fixture for mock common data.""" + return IntelliFireCommonFireplaceData( + auth_cookie="B984F21A6378560019F8A1CDE41B6782", + user_id="52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + web_client_id="FA2B1C3045601234D0AE17D72F8E975", + serial="3FB284769E4736F30C8973A7ED358123", + api_key="B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + ip_address="192.168.2.108", + read_mode=IntelliFireApiMode.LOCAL, + control_mode=IntelliFireApiMode.LOCAL, + ) + + +@pytest.fixture +def mock_apis_multifp( + mock_cloud_interface, mock_local_interface, mock_fp +) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock], None, None]: + """Multi fireplace version of mocks.""" + return mock_local_interface, mock_cloud_interface, mock_fp + + +@pytest.fixture +def mock_apis_single_fp( + mock_cloud_interface, mock_local_interface, mock_fp +) -> Generator[tuple[AsyncMock, AsyncMock, MagicMock], None, None]: + """Single fire place version of the mocks.""" + data_v1 = IntelliFireUserData( + **load_json_object_fixture("user_data_1.json", DOMAIN) + ) + with patch.object( + type(mock_cloud_interface), "user_data", new_callable=PropertyMock + ) as mock_user_data: + mock_user_data.return_value = data_v1 + yield mock_local_interface, mock_cloud_interface, mock_fp + + +@pytest.fixture +def mock_cloud_interface() -> Generator[AsyncMock, None, None]: + """Mock cloud interface to use for testing.""" + user_data = IntelliFireUserData( + **load_json_object_fixture("user_data_3.json", DOMAIN) + ) + + with ( + patch( + "homeassistant.components.intellifire.IntelliFireCloudInterface", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.intellifire.config_flow.IntelliFireCloudInterface", + new=mock_client, + ), + patch( + "intellifire4py.cloud_interface.IntelliFireCloudInterface", + new=mock_client, + ), + ): + # Mock async context manager + mock_client = mock_client.return_value + mock_client.__aenter__ = AsyncMock(return_value=mock_client) + mock_client.__aexit__ = AsyncMock(return_value=None) + + # Mock other async methods if needed + mock_client.login_with_credentials = AsyncMock() + mock_client.poll = AsyncMock() + type(mock_client).user_data = PropertyMock(return_value=user_data) + + yield mock_client # Yielding to the test + + +@pytest.fixture +def mock_local_interface() -> Generator[AsyncMock, None, None]: + """Mock version of IntelliFireAPILocal.""" + poll_data = IntelliFirePollData( + **load_json_object_fixture("intellifire/local_poll.json") + ) with patch( - 
"homeassistant.components.intellifire.config_flow.IntellifireAPILocal", + "homeassistant.components.intellifire.config_flow.IntelliFireAPILocal", autospec=True, - ) as intellifire_mock: - intellifire = intellifire_mock.return_value - intellifire.data = data_mock - yield intellifire + ) as mock_client: + mock_client = mock_client.return_value + # Mock all instances of the class + type(mock_client).data = PropertyMock(return_value=poll_data) + yield mock_client -def mock_api_connection_error() -> ConnectionError: - """Return a fake a ConnectionError for iftapi.net.""" - ret = ConnectionError() - ret.args = [ConnectionKey("iftapi.net", 443, False, None, None, None, None)] - return ret +@pytest.fixture +def mock_fp(mock_common_data_local) -> Generator[AsyncMock, None, None]: + """Mock fireplace.""" + + local_poll_data = IntelliFirePollData( + **load_json_object_fixture("local_poll.json", DOMAIN) + ) + + assert local_poll_data.connection_quality == 988451 + + with patch( + "homeassistant.components.intellifire.UnifiedFireplace" + ) as mock_unified_fireplace: + # Create an instance of the mock + mock_instance = mock_unified_fireplace.return_value + + # Mock methods and properties of the instance + mock_instance.perform_cloud_poll = AsyncMock() + mock_instance.perform_local_poll = AsyncMock() + + mock_instance.async_validate_connectivity = AsyncMock(return_value=(True, True)) + + type(mock_instance).is_cloud_polling = PropertyMock(return_value=False) + type(mock_instance).is_local_polling = PropertyMock(return_value=True) + + mock_instance.get_user_data_as_json.return_value = '{"mock": "data"}' + + mock_instance.ip_address = "192.168.1.100" + mock_instance.api_key = "mock_api_key" + mock_instance.serial = "mock_serial" + mock_instance.user_id = "mock_user_id" + mock_instance.auth_cookie = "mock_auth_cookie" + mock_instance.web_client_id = "mock_web_client_id" + + # Configure the READ Api + mock_instance.read_api = MagicMock() + mock_instance.read_api.poll = MagicMock(return_value=local_poll_data) + mock_instance.read_api.data = local_poll_data + + mock_instance.control_api = MagicMock() + + mock_instance.local_connectivity = True + mock_instance.cloud_connectivity = False + + mock_instance._read_mode = IntelliFireApiMode.LOCAL + mock_instance.read_mode = IntelliFireApiMode.LOCAL + + mock_instance.control_mode = IntelliFireApiMode.LOCAL + mock_instance._control_mode = IntelliFireApiMode.LOCAL + + mock_instance.data = local_poll_data + + mock_instance.set_read_mode = AsyncMock() + mock_instance.set_control_mode = AsyncMock() + + mock_instance.async_validate_connectivity = AsyncMock( + return_value=(True, False) + ) + + # Patch class methods + with patch( + "homeassistant.components.intellifire.UnifiedFireplace.build_fireplace_from_common", + new_callable=AsyncMock, + return_value=mock_instance, + ): + yield mock_instance diff --git a/tests/components/intellifire/fixtures/local_poll.json b/tests/components/intellifire/fixtures/local_poll.json new file mode 100644 index 00000000000..9dac47c698d --- /dev/null +++ b/tests/components/intellifire/fixtures/local_poll.json @@ -0,0 +1,29 @@ +{ + "name": "", + "serial": "4GC295860E5837G40D9974B7FD459234", + "temperature": 17, + "battery": 0, + "pilot": 1, + "light": 0, + "height": 1, + "fanspeed": 1, + "hot": 0, + "power": 1, + "thermostat": 0, + "setpoint": 0, + "timer": 0, + "timeremaining": 0, + "prepurge": 0, + "feature_light": 0, + "feature_thermostat": 1, + "power_vent": 0, + "feature_fan": 1, + "errors": [], + "fw_version": "0x00030200", + 
"fw_ver_str": "0.3.2+hw2", + "downtime": 0, + "uptime": 117, + "connection_quality": 988451, + "ecm_latency": 0, + "ipv4_address": "192.168.2.108" +} diff --git a/tests/components/intellifire/fixtures/user_data_1.json b/tests/components/intellifire/fixtures/user_data_1.json new file mode 100644 index 00000000000..501d240662b --- /dev/null +++ b/tests/components/intellifire/fixtures/user_data_1.json @@ -0,0 +1,17 @@ +{ + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "fireplaces": [ + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123" + } + ], + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas" +} diff --git a/tests/components/intellifire/fixtures/user_data_3.json b/tests/components/intellifire/fixtures/user_data_3.json new file mode 100644 index 00000000000..39e9c95abbd --- /dev/null +++ b/tests/components/intellifire/fixtures/user_data_3.json @@ -0,0 +1,33 @@ +{ + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "fireplaces": [ + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123" + }, + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.109", + "api_key": "D4C5EB28BBFF41E1FB21AFF9BFA6CD34", + "serial": "4GC295860E5837G40D9974B7FD459234" + }, + { + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "ip_address": "192.168.2.110", + "api_key": "E5D6FC39CCED52F1FB21AFF9BFA6DE56", + "serial": "5HD306971F5938H51EAA85C8GE561345" + } + ], + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas" +} diff --git a/tests/components/intellifire/snapshots/test_binary_sensor.ambr b/tests/components/intellifire/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..34d5836a025 --- /dev/null +++ b/tests/components/intellifire/snapshots/test_binary_sensor.ambr @@ -0,0 +1,717 @@ +# serializer version: 1 +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_accessory_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_accessory_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Accessory error', + 'platform': 'intellifire', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'accessory_error', + 'unique_id': 'error_accessory_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_accessory_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Accessory error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_accessory_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_disabled_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_disabled_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Disabled error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'disabled_error', + 'unique_id': 'error_disabled_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_disabled_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Disabled error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_disabled_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_ecm_offline_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_ecm_offline_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'ECM offline error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ecm_offline_error', + 'unique_id': 'error_ecm_offline_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_ecm_offline_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire ECM offline error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_ecm_offline_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_delay_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 
'entity_category': , + 'entity_id': 'binary_sensor.intellifire_fan_delay_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fan delay error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_delay_error', + 'unique_id': 'error_fan_delay_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_delay_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Fan delay error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_fan_delay_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_fan_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Fan error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fan_error', + 'unique_id': 'error_fan_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_fan_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Fan error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_fan_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_flame', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Flame', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flame', + 'unique_id': 'on_off_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Flame', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_flame', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_flame_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Flame Error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flame_error', + 'unique_id': 'error_flame_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_flame_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Flame Error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_flame_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_lights_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_lights_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Lights error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'lights_error', + 'unique_id': 'error_lights_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_lights_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Lights error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_lights_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_maintenance_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_maintenance_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Maintenance error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'maintenance_error', + 'unique_id': 'error_maintenance_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_maintenance_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Maintenance error', + }), + 'context': , + 
'entity_id': 'binary_sensor.intellifire_maintenance_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_offline_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_offline_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Offline error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'offline_error', + 'unique_id': 'error_offline_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_offline_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Offline error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_offline_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_flame_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_pilot_flame_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Pilot flame error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pilot_flame_error', + 'unique_id': 'error_pilot_flame_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_flame_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Pilot flame error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_pilot_flame_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_light_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_pilot_light_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Pilot light on', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'pilot_light_on', + 'unique_id': 'pilot_light_on_mock_serial', + 'unit_of_measurement': 
None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_pilot_light_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Pilot light on', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_pilot_light_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_soft_lock_out_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.intellifire_soft_lock_out_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Soft lock out error', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'soft_lock_out_error', + 'unique_id': 'error_soft_lock_out_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_soft_lock_out_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'problem', + 'friendly_name': 'IntelliFire Soft lock out error', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_soft_lock_out_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_thermostat_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_thermostat_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat on', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'thermostat_on', + 'unique_id': 'thermostat_on_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_thermostat_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Thermostat on', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_thermostat_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_timer_on-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.intellifire_timer_on', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 
'original_device_class': None, + 'original_icon': None, + 'original_name': 'Timer on', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_on', + 'unique_id': 'timer_on_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_binary_sensor_entities[binary_sensor.intellifire_timer_on-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Timer on', + }), + 'context': , + 'entity_id': 'binary_sensor.intellifire_timer_on', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/intellifire/snapshots/test_climate.ambr b/tests/components/intellifire/snapshots/test_climate.ambr new file mode 100644 index 00000000000..36f719d2264 --- /dev/null +++ b/tests/components/intellifire/snapshots/test_climate.ambr @@ -0,0 +1,66 @@ +# serializer version: 1 +# name: test_all_sensor_entities[climate.intellifire_thermostat-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 37, + 'min_temp': 0, + 'target_temp_step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.intellifire_thermostat', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Thermostat', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'climate_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[climate.intellifire_thermostat-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'current_temperature': 17.0, + 'friendly_name': 'IntelliFire Thermostat', + 'hvac_modes': list([ + , + , + ]), + 'max_temp': 37, + 'min_temp': 0, + 'supported_features': , + 'target_temp_step': 1.0, + 'temperature': 0.0, + }), + 'context': , + 'entity_id': 'climate.intellifire_thermostat', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/intellifire/snapshots/test_sensor.ambr b/tests/components/intellifire/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..d5e59e3f00f --- /dev/null +++ b/tests/components/intellifire/snapshots/test_sensor.ambr @@ -0,0 +1,587 @@ +# serializer version: 1 +# name: test_all_sensor_entities[sensor.intellifire_connection_quality-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_connection_quality', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Connection quality', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'connection_quality', + 'unique_id': 'connection_quality_mock_serial', + 
'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_connection_quality-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Connection quality', + }), + 'context': , + 'entity_id': 'sensor.intellifire_connection_quality', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '988451', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_downtime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_downtime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Downtime', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'downtime', + 'unique_id': 'downtime_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_downtime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'timestamp', + 'friendly_name': 'IntelliFire Downtime', + }), + 'context': , + 'entity_id': 'sensor.intellifire_downtime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ecm_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_ecm_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'ECM latency', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ecm_latency', + 'unique_id': 'ecm_latency_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ecm_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire ECM latency', + }), + 'context': , + 'entity_id': 'sensor.intellifire_ecm_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_fan_speed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_fan_speed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Fan Speed', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'fan_speed', + 'unique_id': 'fan_speed_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_fan_speed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Fan Speed', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_fan_speed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_flame_height-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_flame_height', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Flame height', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'flame_height', + 'unique_id': 'flame_height_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_flame_height-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Flame height', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_flame_height', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ip_address-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_ip_address', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'IP address', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ipv4_address', + 'unique_id': 'ipv4_address_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_ip_address-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire IP address', + }), + 'context': , + 'entity_id': 'sensor.intellifire_ip_address', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '192.168.2.108', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_local_connectivity-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_local_connectivity', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Local connectivity', + 'platform': 
'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'local_connectivity', + 'unique_id': 'local_connectivity_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_local_connectivity-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire Local connectivity', + }), + 'context': , + 'entity_id': 'sensor.intellifire_local_connectivity', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'True', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'local_connectivity', + 'unique_id': 'local_connectivity_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'friendly_name': 'IntelliFire None', + }), + 'context': , + 'entity_id': 'sensor.intellifire_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'True', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_target_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_target_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Target temperature', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'target_temp', + 'unique_id': 'target_temp_mock_serial', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_target_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'temperature', + 'friendly_name': 'IntelliFire Target temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_target_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 
'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'temperature_mock_serial', + 'unit_of_measurement': , + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'temperature', + 'friendly_name': 'IntelliFire Temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '17', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_timer_end-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.intellifire_timer_end', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Timer end', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'timer_end_timestamp', + 'unique_id': 'timer_end_timestamp_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_timer_end-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'timestamp', + 'friendly_name': 'IntelliFire Timer end', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.intellifire_timer_end', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.intellifire_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'intellifire', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'uptime', + 'unique_id': 'uptime_mock_serial', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_sensor_entities[sensor.intellifire_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by unpublished Intellifire API', + 'device_class': 'timestamp', + 'friendly_name': 'IntelliFire Uptime', + }), + 'context': , + 'entity_id': 'sensor.intellifire_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T11:58:03+00:00', + }) +# --- diff --git a/tests/components/intellifire/test_binary_sensor.py b/tests/components/intellifire/test_binary_sensor.py new file mode 100644 index 00000000000..a40f92b84d5 --- /dev/null +++ b/tests/components/intellifire/test_binary_sensor.py @@ -0,0 +1,35 @@ 
+"""Test IntelliFire Binary Sensors.""" + +from unittest.mock import AsyncMock, patch + +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_binary_sensor_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry_current: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_apis_single_fp: tuple[AsyncMock, AsyncMock, AsyncMock], +) -> None: + """Test all entities.""" + + with ( + patch( + "homeassistant.components.intellifire.PLATFORMS", [Platform.BINARY_SENSOR] + ), + ): + await setup_integration(hass, mock_config_entry_current) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry_current.entry_id + ) diff --git a/tests/components/intellifire/test_climate.py b/tests/components/intellifire/test_climate.py new file mode 100644 index 00000000000..da1b2864791 --- /dev/null +++ b/tests/components/intellifire/test_climate.py @@ -0,0 +1,34 @@ +"""Test climate.""" + +from unittest.mock import patch + +from freezegun import freeze_time +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@freeze_time("2021-01-01T12:00:00Z") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_sensor_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry_current: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_fp, +) -> None: + """Test all entities.""" + with ( + patch("homeassistant.components.intellifire.PLATFORMS", [Platform.CLIMATE]), + ): + await setup_integration(hass, mock_config_entry_current) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry_current.entry_id + ) diff --git a/tests/components/intellifire/test_config_flow.py b/tests/components/intellifire/test_config_flow.py index ba4e2f039a3..f1465c4dcd4 100644 --- a/tests/components/intellifire/test_config_flow.py +++ b/tests/components/intellifire/test_config_flow.py @@ -1,323 +1,168 @@ """Test the IntelliFire config flow.""" -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import AsyncMock -from intellifire4py.exceptions import LoginException +from intellifire4py.exceptions import LoginError from homeassistant import config_entries from homeassistant.components import dhcp -from homeassistant.components.intellifire.config_flow import MANUAL_ENTRY_STRING -from homeassistant.components.intellifire.const import CONF_USER_ID, DOMAIN -from homeassistant.const import CONF_API_KEY, CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.components.intellifire.const import CONF_SERIAL, DOMAIN +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .conftest import mock_api_connection_error - from tests.common import MockConfigEntry -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(), - 
get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_no_discovery( +async def test_standard_config_with_single_fireplace( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_single_fp, ) -> None: - """Test we should get the manual discovery form - because no discovered fireplaces.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=[], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM + """Test standard flow with a user who has only a single fireplace.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] == FlowResultType.FORM assert result["errors"] == {} - assert result["step_id"] == "manual_device_entry" + assert result["step_id"] == "cloud_api" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], - { - CONF_HOST: "1.1.1.1", - }, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - await hass.async_block_till_done() - - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "api_config" - - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, - ) - await hass.async_block_till_done() - - assert result3["type"] is FlowResultType.CREATE_ENTRY - assert result3["title"] == "Fireplace 12345" - assert result3["data"] == { - CONF_HOST: "1.1.1.1", - CONF_USERNAME: "test", - CONF_PASSWORD: "AROONIE", - CONF_API_KEY: "key", - CONF_USER_ID: "intellifire", + # For a single fireplace we just create it + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", } - assert len(mock_setup_entry.mock_calls) == 1 -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(side_effect=mock_api_connection_error()), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_single_discovery( +async def test_standard_config_with_pre_configured_fireplace( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_config_entry_current, + mock_apis_single_fp, ) -> None: - """Test single fireplace UDP discovery.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) + """What if we try to configure an already configured fireplace.""" + # Configure an existing entry + mock_config_entry_current.add_to_hass(hass) - await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: "192.168.1.69"} + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} ) - await hass.async_block_till_done() - result3 = await hass.config_entries.flow.async_configure( + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "cloud_api" + + result = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.FORM - assert result3["errors"] == {"base": "iftapi_connect"} + + # For a single fireplace we just create it + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "no_available_devices" -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(side_effect=LoginException), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_single_discovery_loign_error( +async def test_standard_config_with_single_fireplace_and_bad_credentials( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_single_fp, ) -> None: - """Test single fireplace UDP discovery.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - await hass.config_entries.flow.async_configure( - result["flow_id"], {CONF_HOST: "192.168.1.69"} - ) - await hass.async_block_till_done() - result3 = await hass.config_entries.flow.async_configure( - result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, - ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.FORM - assert result3["errors"] == {"base": "api_error"} - - -async def test_manual_entry( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test for multiple Fireplace discovery - involving a pick_device step.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["step_id"] == "pick_device" - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: MANUAL_ENTRY_STRING} - ) - - await hass.async_block_till_done() - assert result2["step_id"] == "manual_device_entry" - - -async def test_multi_discovery( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test for multiple fireplace discovery - involving a pick_device step.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - - assert result["step_id"] == "pick_device" - await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: "192.168.1.33"} - ) - await hass.async_block_till_done() - assert result["step_id"] == 
"pick_device" - - -async def test_multi_discovery_cannot_connect( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test for multiple fireplace discovery - involving a pick_device step.""" - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.69", "192.168.1.33", "192.168.169"], - ): - mock_intellifire_config_flow.poll.side_effect = ConnectionError - - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "pick_device" - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], user_input={CONF_HOST: "192.168.1.33"} - ) - await hass.async_block_till_done() - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} - - -async def test_form_cannot_connect_manual_entry( - hass: HomeAssistant, - mock_intellifire_config_flow: MagicMock, - mock_fireplace_finder_single: AsyncMock, -) -> None: - """Test we handle cannot connect error.""" - mock_intellifire_config_flow.poll.side_effect = ConnectionError + """Test bad credentials on a login.""" + mock_local_interface, mock_cloud_interface, mock_fp = mock_apis_single_fp + # Set login error + mock_cloud_interface.login_with_credentials.side_effect = LoginError result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "manual_device_entry" + assert result["errors"] == {} + assert result["step_id"] == "cloud_api" - result2 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], - { - CONF_HOST: "1.1.1.1", - }, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - assert result2["type"] is FlowResultType.FORM - assert result2["errors"] == {"base": "cannot_connect"} + # Erase the error + mock_cloud_interface.login_with_credentials.side_effect = None + + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {"base": "api_error"} + assert result["step_id"] == "cloud_api" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + ) + # For a single fireplace we just create it + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "ip_address": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", + } -async def test_picker_already_discovered( +async def test_standard_config_with_multiple_fireplace( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_multifp, ) -> None: - """Test single fireplace UDP discovery.""" - - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "host": "192.168.1.3", - }, - title="Fireplace", - unique_id=44444, - ) - entry.add_to_hass(hass) - with patch( - "homeassistant.components.intellifire.config_flow.AsyncUDPFireplaceFinder.search_fireplace", - return_value=["192.168.1.3"], 
- ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_USER} - ) - await hass.async_block_till_done() - - result2 = await hass.config_entries.flow.async_configure( - result["flow_id"], - { - CONF_HOST: "192.168.1.4", - }, - ) - assert result2["type"] is FlowResultType.FORM - assert len(mock_setup_entry.mock_calls) == 0 - - -@patch.multiple( - "homeassistant.components.intellifire.config_flow.IntellifireAPICloud", - login=AsyncMock(), - get_user_id=MagicMock(return_value="intellifire"), - get_fireplace_api_key=MagicMock(return_value="key"), -) -async def test_reauth_flow( - hass: HomeAssistant, - mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, -) -> None: - """Test the reauth flow.""" - - entry = MockConfigEntry( - domain=DOMAIN, - data={ - "host": "192.168.1.3", - }, - title="Fireplace 1234", - version=1, - unique_id="4444", - ) - entry.add_to_hass(hass) - + """Test multi-fireplace user who must be very rich.""" result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": "reauth", - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, + DOMAIN, context={"source": config_entries.SOURCE_USER} ) + assert result["type"] == FlowResultType.FORM + assert result["errors"] == {} + assert result["step_id"] == "cloud_api" - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "api_config" - - result3 = await hass.config_entries.flow.async_configure( + result = await hass.config_entries.flow.async_configure( result["flow_id"], - {CONF_USERNAME: "test", CONF_PASSWORD: "AROONIE"}, + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - await hass.async_block_till_done() - assert result3["type"] is FlowResultType.ABORT - assert entry.data[CONF_PASSWORD] == "AROONIE" - assert entry.data[CONF_USERNAME] == "test" + # When we have multiple fireplaces we get to pick a serial + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "pick_cloud_device" + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_SERIAL: "4GC295860E5837G40D9974B7FD459234"}, + ) + assert result["type"] == FlowResultType.CREATE_ENTRY + assert result["data"] == { + "ip_address": "192.168.2.109", + "api_key": "D4C5EB28BBFF41E1FB21AFF9BFA6CD34", + "serial": "4GC295860E5837G40D9974B7FD459234", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", + } async def test_dhcp_discovery_intellifire_device( hass: HomeAssistant, mock_setup_entry: AsyncMock, - mock_intellifire_config_flow: MagicMock, + mock_apis_multifp, ) -> None: """Test successful DHCP Discovery.""" + result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_DHCP}, @@ -327,26 +172,26 @@ async def test_dhcp_discovery_intellifire_device( hostname="zentrios-Test", ), ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "dhcp_confirm" - result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) - assert result2["type"] is FlowResultType.FORM - assert result2["step_id"] == "dhcp_confirm" - result3 = await hass.config_entries.flow.async_configure( - result2["flow_id"], user_input={} + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + result = await 
hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, ) - assert result3["title"] == "Fireplace 12345" - assert result3["data"] == {"host": "1.1.1.1"} + assert result["type"] == FlowResultType.CREATE_ENTRY async def test_dhcp_discovery_non_intellifire_device( hass: HomeAssistant, - mock_intellifire_config_flow: MagicMock, mock_setup_entry: AsyncMock, + mock_apis_multifp, ) -> None: - """Test failed DHCP Discovery.""" + """Test DHCP discovery of a device that is not an IntelliFire.""" - mock_intellifire_config_flow.poll.side_effect = ConnectionError + # Patch poll with an exception + mock_local_interface, mock_cloud_interface, mock_fp = mock_apis_multifp + mock_local_interface.poll.side_effect = ConnectionError result = await hass.config_entries.flow.async_init( DOMAIN, @@ -357,6 +202,28 @@ async def test_dhcp_discovery_non_intellifire_device( hostname="zentrios-Evil", ), ) - - assert result["type"] is FlowResultType.ABORT + assert result["type"] == FlowResultType.ABORT assert result["reason"] == "not_intellifire_device" + # Test is finished - the DHCP scanner detected a hostname that "might" be an IntelliFire device, but it was not. + + +async def test_reauth_flow( + hass: HomeAssistant, + mock_config_entry_current: MockConfigEntry, + mock_apis_single_fp, + mock_setup_entry: AsyncMock, +) -> None: + """Test reauth.""" + + mock_config_entry_current.add_to_hass(hass) + result = await mock_config_entry_current.start_reauth_flow(hass) + assert result["type"] == FlowResultType.FORM + assert result["step_id"] == "cloud_api" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {CONF_USERNAME: "donJulio", CONF_PASSWORD: "Tequila0FD00m"}, + ) + + assert result["type"] == FlowResultType.ABORT + assert result["reason"] == "reauth_successful" diff --git a/tests/components/intellifire/test_init.py b/tests/components/intellifire/test_init.py new file mode 100644 index 00000000000..6d08fda26c3 --- /dev/null +++ b/tests/components/intellifire/test_init.py @@ -0,0 +1,111 @@ +"""Test the IntelliFire integration setup and migration.""" + +from unittest.mock import AsyncMock, patch + +from homeassistant.components.intellifire import CONF_USER_ID +from homeassistant.components.intellifire.const import ( + API_MODE_CLOUD, + API_MODE_LOCAL, + CONF_AUTH_COOKIE, + CONF_CONTROL_MODE, + CONF_READ_MODE, + CONF_SERIAL, + CONF_WEB_CLIENT_ID, + DOMAIN, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + CONF_API_KEY, + CONF_HOST, + CONF_IP_ADDRESS, + CONF_PASSWORD, + CONF_USERNAME, +) +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def test_minor_migration( + hass: HomeAssistant, mock_config_entry_old, mock_apis_single_fp +) -> None: + """With the new library we are going to end up rewriting the config entries.""" + mock_config_entry_old.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_old.entry_id) + + assert mock_config_entry_old.data == { + "ip_address": "192.168.2.108", + "host": "192.168.2.108", + "api_key": "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + "serial": "3FB284769E4736F30C8973A7ED358123", + "auth_cookie": "B984F21A6378560019F8A1CDE41B6782", + "web_client_id": "FA2B1C3045601234D0AE17D72F8E975", + "user_id": "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + "username": "grumpypanda@china.cn", + "password": "you-stole-my-pandas", + } + + +async def test_minor_migration_error(hass: HomeAssistant, 
mock_apis_single_fp) -> None: + """Test a minor version migration that fails and leaves the entry in a migration error state.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=1, + title="Fireplace of testing", + data={ + CONF_HOST: "11.168.2.218", + CONF_USERNAME: "grumpypanda@china.cn", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + ) + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.MIGRATION_ERROR + + +async def test_init_with_no_username(hass: HomeAssistant, mock_apis_single_fp) -> None: + """Test that setup fails when the config entry has no username.""" + mock_config_entry = MockConfigEntry( + domain=DOMAIN, + version=1, + minor_version=2, + data={ + CONF_IP_ADDRESS: "192.168.2.108", + CONF_PASSWORD: "you-stole-my-pandas", + CONF_SERIAL: "3FB284769E4736F30C8973A7ED358123", + CONF_WEB_CLIENT_ID: "FA2B1C3045601234D0AE17D72F8E975", + CONF_API_KEY: "B5C4DA27AAEF31D1FB21AFF9BFA6BCD2", + CONF_AUTH_COOKIE: "B984F21A6378560019F8A1CDE41B6782", + CONF_USER_ID: "52C3F9E8B9D3AC99F8E4D12345678901FE9A2BC7D85F7654E28BF98BCD123456", + }, + options={CONF_READ_MODE: API_MODE_LOCAL, CONF_CONTROL_MODE: API_MODE_CLOUD}, + unique_id="3FB284769E4736F30C8973A7ED358123", + ) + + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_connectivity_bad( + hass: HomeAssistant, + mock_config_entry_current, + mock_apis_single_fp, +) -> None: + """Test a timeout error on the setup flow.""" + + with patch( + "homeassistant.components.intellifire.UnifiedFireplace.build_fireplace_from_common", + new_callable=AsyncMock, + side_effect=TimeoutError, + ): + mock_config_entry_current.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry_current.entry_id) + + await hass.async_block_till_done() + assert len(hass.states.async_all()) == 0 diff --git a/tests/components/intellifire/test_sensor.py b/tests/components/intellifire/test_sensor.py new file mode 100644 index 00000000000..96e344d77fc --- /dev/null +++ b/tests/components/intellifire/test_sensor.py @@ -0,0 +1,35 @@ +"""Test IntelliFire Sensors.""" + +from unittest.mock import AsyncMock, patch + +from freezegun import freeze_time +import pytest +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +@freeze_time("2021-01-01T12:00:00Z") +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_sensor_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry_current: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_apis_single_fp: tuple[AsyncMock, AsyncMock, AsyncMock], +) -> None: + """Test all entities.""" + + with ( + patch("homeassistant.components.intellifire.PLATFORMS", [Platform.SENSOR]), + ): + await setup_integration(hass, mock_config_entry_current) + await snapshot_platform( + hass, entity_registry, snapshot, mock_config_entry_current.entry_id + ) diff --git a/tests/components/intent_script/test_init.py b/tests/components/intent_script/test_init.py index 86f3a7aba46..26c575f0407 100644 --- a/tests/components/intent_script/test_init.py +++ b/tests/components/intent_script/test_init.py @@ -6,7 +6,12 @@ from homeassistant import config as hass_config from homeassistant.components.intent_script import DOMAIN from homeassistant.const import SERVICE_RELOAD from homeassistant.core import HomeAssistant -from homeassistant.helpers import intent +from homeassistant.helpers import ( + area_registry as ar, + entity_registry as er, + floor_registry as fr, + intent, +) from homeassistant.setup import async_setup_component from tests.common import async_mock_service, get_fixture_path @@ -197,6 +202,84 @@ async def test_intent_script_falsy_reprompt(hass: HomeAssistant) -> None: assert response.card["simple"]["content"] == "Content for Paulus" +async def test_intent_script_targets( + hass: HomeAssistant, + area_registry: ar.AreaRegistry, + entity_registry: er.EntityRegistry, + floor_registry: fr.FloorRegistry, +) -> None: + """Test intent scripts work.""" + calls = async_mock_service(hass, "test", "service") + + await async_setup_component( + hass, + "intent_script", + { + "intent_script": { + "Targets": { + "description": "Intent to control a test service.", + "action": { + "service": "test.service", + "data_template": { + "targets": "{{ targets if targets is defined }}", + }, + }, + "speech": { + "text": "{{ targets.entities[0] if targets is defined }}" + }, + } + } + }, + ) + + floor_1 = floor_registry.async_create("first floor") + kitchen = area_registry.async_get_or_create("kitchen") + area_registry.async_update(kitchen.id, floor_id=floor_1.floor_id) + entity_registry.async_get_or_create( + "light", "demo", "1234", suggested_object_id="kitchen" + ) + entity_registry.async_update_entity("light.kitchen", area_id=kitchen.id) + hass.states.async_set("light.kitchen", "off") + + response = await intent.async_handle( + hass, + "test", + "Targets", + {"name": {"value": "kitchen"}, "domain": {"value": "light"}}, + ) + assert len(calls) == 1 + assert calls[0].data["targets"] == {"entities": ["light.kitchen"]} + assert response.speech["plain"]["speech"] == "light.kitchen" + calls.clear() + + response = await intent.async_handle( + hass, + "test", + "Targets", + { + "area": {"value": "kitchen"}, + "floor": {"value": "first floor"}, + }, + ) + assert len(calls) == 1 + assert calls[0].data["targets"] == { + "entities": ["light.kitchen"], + "areas": ["kitchen"], + "floors": ["first_floor"], + } + calls.clear() + + response = await intent.async_handle( + hass, + "test", + "Targets", + {"device_class": {"value": "door"}}, + ) + assert len(calls) == 1 + assert calls[0].data["targets"] == "" + calls.clear() + + async def test_reload(hass: 
HomeAssistant) -> None: """Verify we can reload intent config.""" diff --git a/tests/components/iotty/conftest.py b/tests/components/iotty/conftest.py index 7961a4ce3a1..9f858879cb9 100644 --- a/tests/components/iotty/conftest.py +++ b/tests/components/iotty/conftest.py @@ -65,7 +65,7 @@ def aiohttp_client_session() -> None: @pytest.fixture -def mock_aioclient() -> Generator[AiohttpClientMocker, None, None]: +def mock_aioclient() -> Generator[AiohttpClientMocker]: """Fixture to mock aioclient calls.""" with mock_aiohttp_client() as mock_session: yield mock_session @@ -96,7 +96,7 @@ def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: @pytest.fixture -def mock_config_entries_async_forward_entry_setup() -> Generator[AsyncMock, None, None]: +def mock_config_entries_async_forward_entry_setup() -> Generator[AsyncMock]: """Mock async_forward_entry_setup.""" with patch( "homeassistant.config_entries.ConfigEntries.async_forward_entry_setups" @@ -105,7 +105,7 @@ def mock_config_entries_async_forward_entry_setup() -> Generator[AsyncMock, None @pytest.fixture -def mock_setup_entry() -> Generator[AsyncMock, None, None]: +def mock_setup_entry() -> Generator[AsyncMock]: """Mock setting up a config entry.""" with patch( "homeassistant.components.iotty.async_setup_entry", return_value=True @@ -114,7 +114,7 @@ def mock_setup_entry() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_iotty() -> Generator[None, MagicMock, None]: +def mock_iotty() -> Generator[MagicMock]: """Mock IottyProxy.""" with patch( "homeassistant.components.iotty.api.IottyProxy", autospec=True @@ -123,7 +123,7 @@ def mock_iotty() -> Generator[None, MagicMock, None]: @pytest.fixture -def mock_coordinator() -> Generator[None, MagicMock, None]: +def mock_coordinator() -> Generator[MagicMock]: """Mock IottyDataUpdateCoordinator.""" with patch( "homeassistant.components.iotty.coordinator.IottyDataUpdateCoordinator", @@ -133,7 +133,7 @@ def mock_coordinator() -> Generator[None, MagicMock, None]: @pytest.fixture -def mock_get_devices_nodevices() -> Generator[AsyncMock, None, None]: +def mock_get_devices_nodevices() -> Generator[AsyncMock]: """Mock for get_devices, returning two objects.""" with patch("iottycloud.cloudapi.CloudApi.get_devices") as mock_fn: @@ -141,7 +141,7 @@ def mock_get_devices_nodevices() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_get_devices_twolightswitches() -> Generator[AsyncMock, None, None]: +def mock_get_devices_twolightswitches() -> Generator[AsyncMock]: """Mock for get_devices, returning two objects.""" with patch( @@ -151,7 +151,7 @@ def mock_get_devices_twolightswitches() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_command_fn() -> Generator[AsyncMock, None, None]: +def mock_command_fn() -> Generator[AsyncMock]: """Mock for command.""" with patch("iottycloud.cloudapi.CloudApi.command", return_value=None) as mock_fn: @@ -159,7 +159,7 @@ def mock_command_fn() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_get_status_filled_off() -> Generator[AsyncMock, None, None]: +def mock_get_status_filled_off() -> Generator[AsyncMock]: """Mock setting up a get_status.""" retval = {RESULT: {STATUS: STATUS_OFF}} @@ -170,7 +170,7 @@ def mock_get_status_filled_off() -> Generator[AsyncMock, None, None]: @pytest.fixture -def mock_get_status_filled() -> Generator[AsyncMock, None, None]: +def mock_get_status_filled() -> Generator[AsyncMock]: """Mock setting up a get_status.""" retval = {RESULT: {STATUS: STATUS_ON}} diff --git 
a/tests/components/ipma/snapshots/test_weather.ambr b/tests/components/ipma/snapshots/test_weather.ambr index 1142cb7cfe5..80f385546d1 100644 --- a/tests/components/ipma/snapshots/test_weather.ambr +++ b/tests/components/ipma/snapshots/test_weather.ambr @@ -1,119 +1,4 @@ # serializer version: 1 -# name: test_forecast_service - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 16, 0, 0), - 'precipitation_probability': '100.0', - 'temperature': 16.2, - 'templow': 10.6, - 'wind_bearing': 'S', - 'wind_speed': 10.0, - }), - ]), - }) -# --- -# name: test_forecast_service.1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - dict({ - 'condition': 'clear-night', - 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - ]), - }) -# --- -# name: test_forecast_service[forecast] - dict({ - 'weather.hometown': dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 16, 0, 0), - 'precipitation_probability': '100.0', - 'temperature': 16.2, - 'templow': 10.6, - 'wind_bearing': 'S', - 'wind_speed': 10.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[forecast].1 - dict({ - 'weather.hometown': dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - dict({ - 'condition': 'clear-night', - 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 16, 0, 0), - 'precipitation_probability': 100.0, - 'temperature': 16.2, - 'templow': 10.6, - 'wind_bearing': 'S', - 'wind_speed': 10.0, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'rainy', - 'datetime': datetime.datetime(2020, 1, 15, 1, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - dict({ - 'condition': 'clear-night', - 'datetime': datetime.datetime(2020, 1, 15, 2, 0, tzinfo=datetime.timezone.utc), - 'precipitation_probability': 80.0, - 'temperature': 12.0, - 'wind_bearing': 'S', - 'wind_speed': 32.7, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.hometown': dict({ diff --git a/tests/components/iron_os/conftest.py b/tests/components/iron_os/conftest.py index b6983074441..f489d7b7bb5 100644 --- a/tests/components/iron_os/conftest.py +++ b/tests/components/iron_os/conftest.py @@ -108,7 +108,7 @@ def mock_ble_device() -> Generator[MagicMock]: @pytest.fixture -def mock_pynecil() -> Generator[AsyncMock, None, None]: +def mock_pynecil() -> Generator[AsyncMock]: """Mock Pynecil library.""" with patch( "homeassistant.components.iron_os.Pynecil", autospec=True diff --git a/tests/components/iron_os/test_init.py 
b/tests/components/iron_os/test_init.py index fb0a782ea36..f7db2a813ec 100644 --- a/tests/components/iron_os/test_init.py +++ b/tests/components/iron_os/test_init.py @@ -11,6 +11,40 @@ from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry +@pytest.mark.usefixtures("mock_pynecil", "ble_device") +async def test_setup_and_unload( + hass: HomeAssistant, + config_entry: MockConfigEntry, +) -> None: + """Test integration setup and unload.""" + + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.usefixtures("ble_device") +async def test_update_data_config_entry_not_ready( + hass: HomeAssistant, + config_entry: MockConfigEntry, + mock_pynecil: AsyncMock, +) -> None: + """Test config entry not ready.""" + mock_pynecil.get_live_data.side_effect = CommunicationError + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + @pytest.mark.usefixtures("ble_device") async def test_setup_config_entry_not_ready( hass: HomeAssistant, diff --git a/tests/components/iron_os/test_number.py b/tests/components/iron_os/test_number.py index c091040668c..781492987ee 100644 --- a/tests/components/iron_os/test_number.py +++ b/tests/components/iron_os/test_number.py @@ -22,7 +22,7 @@ from tests.common import MockConfigEntry, snapshot_platform @pytest.fixture(autouse=True) -async def sensor_only() -> AsyncGenerator[None, None]: +async def sensor_only() -> AsyncGenerator[None]: """Enable only the number platform.""" with patch( "homeassistant.components.iron_os.PLATFORMS", diff --git a/tests/components/iron_os/test_sensor.py b/tests/components/iron_os/test_sensor.py index 0c35193e400..2f79487a7fd 100644 --- a/tests/components/iron_os/test_sensor.py +++ b/tests/components/iron_os/test_sensor.py @@ -18,7 +18,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_plat @pytest.fixture(autouse=True) -async def sensor_only() -> AsyncGenerator[None, None]: +async def sensor_only() -> AsyncGenerator[None]: """Enable only the sensor platform.""" with patch( "homeassistant.components.iron_os.PLATFORMS", diff --git a/tests/components/islamic_prayer_times/test_config_flow.py b/tests/components/islamic_prayer_times/test_config_flow.py index cb37a6b147d..695be636a84 100644 --- a/tests/components/islamic_prayer_times/test_config_flow.py +++ b/tests/components/islamic_prayer_times/test_config_flow.py @@ -3,7 +3,6 @@ import pytest from homeassistant import config_entries -from homeassistant.components import islamic_prayer_times from homeassistant.components.islamic_prayer_times.const import ( CONF_CALC_METHOD, CONF_LAT_ADJ_METHOD, @@ -24,7 +23,7 @@ pytestmark = pytest.mark.usefixtures("mock_setup_entry") async def test_flow_works(hass: HomeAssistant) -> None: """Test user config.""" result = await hass.config_entries.flow.async_init( - islamic_prayer_times.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" @@ -76,7 +75,7 @@ async def test_integration_already_configured(hass: 
HomeAssistant) -> None: ) entry.add_to_hass(hass) result = await hass.config_entries.flow.async_init( - islamic_prayer_times.DOMAIN, context={"source": config_entries.SOURCE_USER} + DOMAIN, context={"source": config_entries.SOURCE_USER} ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/islamic_prayer_times/test_init.py b/tests/components/islamic_prayer_times/test_init.py index 025a202e6da..7961b79676b 100644 --- a/tests/components/islamic_prayer_times/test_init.py +++ b/tests/components/islamic_prayer_times/test_init.py @@ -6,8 +6,7 @@ from unittest.mock import patch from freezegun import freeze_time import pytest -from homeassistant.components import islamic_prayer_times -from homeassistant.components.islamic_prayer_times.const import CONF_CALC_METHOD +from homeassistant.components.islamic_prayer_times.const import CONF_CALC_METHOD, DOMAIN from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_LATITUDE, CONF_LONGITUDE @@ -30,7 +29,7 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: """Test that Islamic Prayer Times is configured successfully.""" entry = MockConfigEntry( - domain=islamic_prayer_times.DOMAIN, + domain=DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -48,7 +47,7 @@ async def test_successful_config_entry(hass: HomeAssistant) -> None: async def test_unload_entry(hass: HomeAssistant) -> None: """Test removing Islamic Prayer Times.""" entry = MockConfigEntry( - domain=islamic_prayer_times.DOMAIN, + domain=DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -66,7 +65,7 @@ async def test_unload_entry(hass: HomeAssistant) -> None: async def test_options_listener(hass: HomeAssistant) -> None: """Ensure updating options triggers a coordinator refresh.""" - entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) + entry = MockConfigEntry(domain=DOMAIN, data={}) entry.add_to_hass(hass) with ( @@ -110,13 +109,13 @@ async def test_migrate_unique_id( old_unique_id: str, ) -> None: """Test unique id migration.""" - entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) + entry = MockConfigEntry(domain=DOMAIN, data={}) entry.add_to_hass(hass) entity: er.RegistryEntry = entity_registry.async_get_or_create( suggested_object_id=object_id, domain=SENSOR_DOMAIN, - platform=islamic_prayer_times.DOMAIN, + platform=DOMAIN, unique_id=old_unique_id, config_entry=entry, ) @@ -140,7 +139,7 @@ async def test_migrate_unique_id( async def test_migration_from_1_1_to_1_2(hass: HomeAssistant) -> None: """Test migrating from version 1.1 to 1.2.""" entry = MockConfigEntry( - domain=islamic_prayer_times.DOMAIN, + domain=DOMAIN, data={}, ) entry.add_to_hass(hass) @@ -164,7 +163,7 @@ async def test_migration_from_1_1_to_1_2(hass: HomeAssistant) -> None: async def test_update_scheduling(hass: HomeAssistant) -> None: """Test that integration schedules update immediately after Islamic midnight.""" - entry = MockConfigEntry(domain=islamic_prayer_times.DOMAIN, data={}) + entry = MockConfigEntry(domain=DOMAIN, data={}) entry.add_to_hass(hass) with ( diff --git a/tests/components/israel_rail/snapshots/test_sensor.ambr b/tests/components/israel_rail/snapshots/test_sensor.ambr index 9806ecb1fae..f851f1cd726 100644 --- a/tests/components/israel_rail/snapshots/test_sensor.ambr +++ b/tests/components/israel_rail/snapshots/test_sensor.ambr @@ -143,147 +143,6 @@ 'state': '2021-10-10T10:30:10+00:00', }) # --- -# name: 
test_valid_config[sensor.mock_title_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'platform', - 'unique_id': 'באר יעקב אשקלון_platform', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'friendly_name': 'Mock Title None', - }), - 'context': , - 'entity_id': 'sensor.mock_title_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_valid_config[sensor.mock_title_none_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_none_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'trains', - 'unique_id': 'באר יעקב אשקלון_trains', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_none_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'friendly_name': 'Mock Title None', - }), - 'context': , - 'entity_id': 'sensor.mock_title_none_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1', - }) -# --- -# name: test_valid_config[sensor.mock_title_none_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_none_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'train_number', - 'unique_id': 'באר יעקב אשקלון_train_number', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_none_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'friendly_name': 'Mock Title None', - }), - 'context': , - 'entity_id': 'sensor.mock_title_none_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1234', - }) -# --- # name: test_valid_config[sensor.mock_title_platform-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -331,150 +190,6 @@ 'state': '1', }) # --- -# name: test_valid_config[sensor.mock_title_timestamp-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_timestamp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Timestamp', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'departure0', - 'unique_id': 'באר יעקב אשקלון_departure', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Timestamp', - }), - 'context': , - 'entity_id': 'sensor.mock_title_timestamp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-10-10T10:10:10+00:00', - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_timestamp_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Timestamp', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'departure1', - 'unique_id': 'באר יעקב אשקלון_departure1', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Timestamp', - }), - 'context': , - 'entity_id': 'sensor.mock_title_timestamp_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-10-10T10:20:10+00:00', - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.mock_title_timestamp_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Timestamp', - 'platform': 'israel_rail', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'departure2', - 'unique_id': 'באר יעקב אשקלון_departure2', - 'unit_of_measurement': None, - }) -# --- -# name: test_valid_config[sensor.mock_title_timestamp_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Data provided by Israel rail.', - 'device_class': 'timestamp', - 'friendly_name': 'Mock Title Timestamp', - }), - 'context': , - 'entity_id': 'sensor.mock_title_timestamp_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2021-10-10T10:30:10+00:00', - }) -# --- # name: 
test_valid_config[sensor.mock_title_train_number-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ista_ecotrend/conftest.py b/tests/components/ista_ecotrend/conftest.py index cbbc166031d..7edf2e4717b 100644 --- a/tests/components/ista_ecotrend/conftest.py +++ b/tests/components/ista_ecotrend/conftest.py @@ -166,3 +166,52 @@ def get_consumption_data(obj_uuid: str | None = None) -> dict[str, Any]: }, ], } + + +def extend_statistics(obj_uuid: str | None = None) -> dict[str, Any]: + """Extend statistics data with new values.""" + stats = get_consumption_data(obj_uuid) + + stats["costs"].insert( + 0, + { + "date": {"month": 6, "year": 2024}, + "costsByEnergyType": [ + { + "type": "heating", + "value": 9000, + }, + { + "type": "warmwater", + "value": 9000, + }, + { + "type": "water", + "value": 9000, + }, + ], + }, + ) + stats["consumptions"].insert( + 0, + { + "date": {"month": 6, "year": 2024}, + "readings": [ + { + "type": "heating", + "value": "9000", + "additionalValue": "9000,0", + }, + { + "type": "warmwater", + "value": "9999,0", + "additionalValue": "90000,0", + }, + { + "type": "water", + "value": "9000,0", + }, + ], + }, + ) + return stats diff --git a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr index f9ab7a54b63..b5056019c74 100644 --- a/tests/components/ista_ecotrend/snapshots/test_sensor.ambr +++ b/tests/components/ista_ecotrend/snapshots/test_sensor.ambr @@ -1,64 +1,4 @@ # serializer version: 1 -# name: test_setup.32 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'https://ecotrend.ista.de/', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'ista_ecotrend', - '26e93f1a-c828-11ea-87d0-0242ac130003', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'ista SE', - 'model': 'ista EcoTrend', - 'name': 'Luxemburger Str. 1', - 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- -# name: test_setup.33 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': 'https://ecotrend.ista.de/', - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'identifiers': set({ - tuple( - 'ista_ecotrend', - 'eaf5c5c8-889f-4a3c-b68c-e9a676505762', - ), - }), - 'is_new': False, - 'labels': set({ - }), - 'manufacturer': 'ista SE', - 'model': 'ista EcoTrend', - 'name': 'Bahnhofsstr. 
1A', - 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- # name: test_setup[sensor.bahnhofsstr_1a_heating-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/ista_ecotrend/snapshots/test_statistics.ambr b/tests/components/ista_ecotrend/snapshots/test_statistics.ambr new file mode 100644 index 00000000000..78ecd6a6b6b --- /dev/null +++ b/tests/components/ista_ecotrend/snapshots/test_statistics.ambr @@ -0,0 +1,609 @@ +# serializer version: 1 +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9083.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_heating_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9999.0, + 'sum': 10001.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 
7.0, + 'sum': 14.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9014.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_hot_water_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 90000.0, + 'sum': 90118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9011.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:bahnhofsstr_1a_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9005.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 104.0, + 'sum': 104.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 35.0, + 'sum': 139.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9139.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_cost_3months] + list([ + 
dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 62.0, + 'sum': 62.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 21.0, + 'sum': 83.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9083.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_heating_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 113.0, + 'sum': 113.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 38.0, + 'sum': 151.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9151.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 1.1, + 'sum': 1.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 1.0, + 'sum': 2.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9999.0, + 'sum': 10001.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 7.0, + 'sum': 7.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 7.0, + 'sum': 14.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9014.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_energy_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_hot_water_energy_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 61.1, + 'sum': 61.1, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 57.0, + 'sum': 118.1, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 90000.0, + 'sum': 90118.1, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 6.8, + 'sum': 6.8, + }), + dict({ 
+ 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 5.0, + 'sum': 11.8, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9011.8, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_cost_2months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + ]) +# --- +# name: test_statistics_import[ista_ecotrend:luxemburger_str_1_water_cost_3months] + list([ + dict({ + 'end': 1714546800.0, + 'start': 1711954800.0, + 'state': 2.0, + 'sum': 2.0, + }), + dict({ + 'end': 1717225200.0, + 'start': 1714546800.0, + 'state': 3.0, + 'sum': 5.0, + }), + dict({ + 'end': 1719817200.0, + 'start': 1717225200.0, + 'state': 9000.0, + 'sum': 9005.0, + }), + ]) +# --- diff --git a/tests/components/ista_ecotrend/test_config_flow.py b/tests/components/ista_ecotrend/test_config_flow.py index b702b0331e8..d6c88c51c99 100644 --- a/tests/components/ista_ecotrend/test_config_flow.py +++ b/tests/components/ista_ecotrend/test_config_flow.py @@ -6,7 +6,7 @@ from pyecotrend_ista import LoginError, ServerError import pytest from homeassistant.components.ista_ecotrend.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -98,15 +98,7 @@ async def test_reauth( ista_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": ista_config_entry.entry_id, - "unique_id": ista_config_entry.unique_id, - }, - ) - + result = await ista_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -148,15 +140,7 @@ async def test_reauth_error_and_recover( ista_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": ista_config_entry.entry_id, - "unique_id": ista_config_entry.unique_id, - }, - ) - + result = await ista_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/ista_ecotrend/test_statistics.py b/tests/components/ista_ecotrend/test_statistics.py new file mode 100644 index 00000000000..21877f686df --- /dev/null +++ b/tests/components/ista_ecotrend/test_statistics.py @@ -0,0 +1,86 @@ +"""Tests for the ista EcoTrend Statistics import.""" + +import datetime +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.recorder.statistics import statistics_during_period +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import extend_statistics + +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.components.recorder.common import async_wait_recording_done + + +@pytest.mark.usefixtures("recorder_mock", "entity_registry_enabled_by_default") +async def test_statistics_import( + hass: HomeAssistant, + ista_config_entry: MockConfigEntry, + 
entity_registry: er.EntityRegistry, + mock_ista: MagicMock, + snapshot: SnapshotAssertion, + freezer: FrozenDateTimeFactory, +) -> None: + """Test setup of ista EcoTrend sensor platform.""" + + ista_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(ista_config_entry.entry_id) + await hass.async_block_till_done() + + assert ista_config_entry.state is ConfigEntryState.LOADED + entities = er.async_entries_for_config_entry( + entity_registry, ista_config_entry.entry_id + ) + await async_wait_recording_done(hass) + + # Test that consumption statistics for 2 months have been added + for entity in entities: + statistic_id = f"ista_ecotrend:{entity.entity_id.removeprefix("sensor.")}" + stats = await hass.async_add_executor_job( + statistics_during_period, + hass, + datetime.datetime.fromtimestamp(0, tz=datetime.UTC), + None, + {statistic_id}, + "month", + None, + {"state", "sum"}, + ) + assert stats[statistic_id] == snapshot(name=f"{statistic_id}_2months") + assert len(stats[statistic_id]) == 2 + + # Add another monthly consumption and forward + # 1 day and test if the new values have been + # appended to the statistics + mock_ista.get_consumption_data = extend_statistics + + freezer.tick(datetime.timedelta(days=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + await async_wait_recording_done(hass) + freezer.tick(datetime.timedelta(days=1)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + await async_wait_recording_done(hass) + + for entity in entities: + statistic_id = f"ista_ecotrend:{entity.entity_id.removeprefix("sensor.")}" + stats = await hass.async_add_executor_job( + statistics_during_period, + hass, + datetime.datetime.fromtimestamp(0, tz=datetime.UTC), + None, + {statistic_id}, + "month", + None, + {"state", "sum"}, + ) + assert stats[statistic_id] == snapshot(name=f"{statistic_id}_3months") + + assert len(stats[statistic_id]) == 3 diff --git a/tests/components/isy994/test_config_flow.py b/tests/components/isy994/test_config_flow.py index 411439e2e70..34e267fe904 100644 --- a/tests/components/isy994/test_config_flow.py +++ b/tests/components/isy994/test_config_flow.py @@ -644,10 +644,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": MOCK_UUID}, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/izone/test_config_flow.py b/tests/components/izone/test_config_flow.py index 6591e402ec2..3c9707b34c6 100644 --- a/tests/components/izone/test_config_flow.py +++ b/tests/components/izone/test_config_flow.py @@ -1,5 +1,7 @@ """Tests for iZone.""" +from collections.abc import Callable +from typing import Any from unittest.mock import Mock, patch import pytest @@ -12,7 +14,7 @@ from homeassistant.helpers.dispatcher import async_dispatcher_send @pytest.fixture -def mock_disco(): +def mock_disco() -> Mock: """Mock discovery service.""" disco = Mock() disco.pi_disco = Mock() @@ -20,15 +22,15 @@ def mock_disco(): return disco -def _mock_start_discovery(hass, mock_disco): - def do_disovered(*args): +def _mock_start_discovery(hass: HomeAssistant, mock_disco: Mock) -> Callable[..., Mock]: + def do_disovered(*args: Any) -> Mock: async_dispatcher_send(hass, DISPATCH_CONTROLLER_DISCOVERED, True) return mock_disco return do_disovered -async def test_not_found(hass: 
HomeAssistant, mock_disco) -> None: +async def test_not_found(hass: HomeAssistant, mock_disco: Mock) -> None: """Test not finding iZone controller.""" with ( @@ -56,7 +58,7 @@ async def test_not_found(hass: HomeAssistant, mock_disco) -> None: stop_disco.assert_called_once() -async def test_found(hass: HomeAssistant, mock_disco) -> None: +async def test_found(hass: HomeAssistant, mock_disco: Mock) -> None: """Test not finding iZone controller.""" mock_disco.pi_disco.controllers["blah"] = object() diff --git a/tests/components/jellyfin/test_config_flow.py b/tests/components/jellyfin/test_config_flow.py index c84a12d26a5..a8ffbcbf46c 100644 --- a/tests/components/jellyfin/test_config_flow.py +++ b/tests/components/jellyfin/test_config_flow.py @@ -222,14 +222,7 @@ async def test_reauth( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -272,14 +265,7 @@ async def test_reauth_cannot_connect( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -339,14 +325,7 @@ async def test_reauth_invalid( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -400,14 +379,7 @@ async def test_reauth_exception( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_INPUT, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/jewish_calendar/__init__.py b/tests/components/jewish_calendar/__init__.py index 60726fc3a3e..440bffc2256 100644 --- a/tests/components/jewish_calendar/__init__.py +++ b/tests/components/jewish_calendar/__init__.py @@ -8,7 +8,7 @@ from freezegun import freeze_time as alter_time # noqa: F401 from homeassistant.components import jewish_calendar import homeassistant.util.dt as dt_util -_LatLng = namedtuple("_LatLng", ["lat", "lng"]) +_LatLng = namedtuple("_LatLng", ["lat", "lng"]) # noqa: PYI024 HDATE_DEFAULT_ALTITUDE = 754 NYC_LATLNG = _LatLng(40.7128, -74.0060) diff --git a/tests/components/justnimbus/test_config_flow.py b/tests/components/justnimbus/test_config_flow.py index f66693a752c..330b05bf48c 100644 --- a/tests/components/justnimbus/test_config_flow.py +++ 
b/tests/components/justnimbus/test_config_flow.py @@ -125,14 +125,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, - data=FIXTURE_OLD_USER_INPUT, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/jvc_projector/test_config_flow.py b/tests/components/jvc_projector/test_config_flow.py index 282411540a4..d7eb0995bbd 100644 --- a/tests/components/jvc_projector/test_config_flow.py +++ b/tests/components/jvc_projector/test_config_flow.py @@ -6,7 +6,7 @@ from jvcprojector import JvcProjectorAuthError, JvcProjectorConnectError import pytest from homeassistant.components.jvc_projector.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -163,14 +163,7 @@ async def test_reauth_config_flow_success( hass: HomeAssistant, mock_device: AsyncMock, mock_integration: MockConfigEntry ) -> None: """Test reauth config flow success.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -194,14 +187,7 @@ async def test_reauth_config_flow_auth_error( """Test reauth config flow when connect fails.""" mock_device.connect.side_effect = JvcProjectorAuthError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -218,14 +204,7 @@ async def test_reauth_config_flow_auth_error( mock_device.connect.side_effect = None - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -249,14 +228,7 @@ async def test_reauth_config_flow_connect_error( """Test reauth config flow when connect fails.""" mock_device.connect.side_effect = JvcProjectorConnectError - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -273,14 +245,7 @@ async def test_reauth_config_flow_connect_error( mock_device.connect.side_effect = None - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_integration.entry_id, - }, - 
data={CONF_HOST: MOCK_HOST, CONF_PORT: MOCK_PORT}, - ) + result = await mock_integration.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/jvc_projector/test_coordinator.py b/tests/components/jvc_projector/test_coordinator.py index 24297348653..b9211250aff 100644 --- a/tests/components/jvc_projector/test_coordinator.py +++ b/tests/components/jvc_projector/test_coordinator.py @@ -5,7 +5,6 @@ from unittest.mock import AsyncMock from jvcprojector import JvcProjectorAuthError, JvcProjectorConnectError -from homeassistant.components.jvc_projector import DOMAIN from homeassistant.components.jvc_projector.coordinator import ( INTERVAL_FAST, INTERVAL_SLOW, @@ -29,7 +28,7 @@ async def test_coordinator_update( ) await hass.async_block_till_done() assert mock_device.get_state.call_count == 3 - coordinator = hass.data[DOMAIN][mock_integration.entry_id] + coordinator = mock_integration.runtime_data assert coordinator.update_interval == INTERVAL_SLOW @@ -69,5 +68,5 @@ async def test_coordinator_device_on( mock_config_entry.add_to_hass(hass) await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - coordinator = hass.data[DOMAIN][mock_config_entry.entry_id] + coordinator = mock_config_entry.runtime_data assert coordinator.update_interval == INTERVAL_FAST diff --git a/tests/components/jvc_projector/test_init.py b/tests/components/jvc_projector/test_init.py index ef9de41ca32..baf088a5dba 100644 --- a/tests/components/jvc_projector/test_init.py +++ b/tests/components/jvc_projector/test_init.py @@ -38,8 +38,6 @@ async def test_unload_config_entry( await hass.config_entries.async_unload(mock_config_entry.entry_id) await hass.async_block_till_done() - assert mock_config_entry.entry_id not in hass.data[DOMAIN] - async def test_config_entry_connect_error( hass: HomeAssistant, diff --git a/tests/components/keymitt_ble/__init__.py b/tests/components/keymitt_ble/__init__.py index 1e717b805c5..6fa608ad3b4 100644 --- a/tests/components/keymitt_ble/__init__.py +++ b/tests/components/keymitt_ble/__init__.py @@ -53,7 +53,7 @@ SERVICE_INFO = BluetoothServiceInfoBleak( class MockMicroBotApiClient: """Mock MicroBotApiClient.""" - def __init__(self, device, token): + def __init__(self, device, token) -> None: """Mock init.""" async def connect(self, init): @@ -70,7 +70,7 @@ class MockMicroBotApiClient: class MockMicroBotApiClientFail: """Mock MicroBotApiClient.""" - def __init__(self, device, token): + def __init__(self, device, token) -> None: """Mock init.""" async def connect(self, init): diff --git a/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr b/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr index 4189de18ce4..e3e413c5a44 100644 --- a/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr +++ b/tests/components/kitchen_sink/snapshots/test_lawn_mower.ambr @@ -49,6 +49,18 @@ 'last_updated': , 'state': 'docked', }), + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mower can return', + 'supported_features': , + }), + 'context': , + 'entity_id': 'lawn_mower.mower_can_return', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'returning', + }), StateSnapshot({ 'attributes': ReadOnlyDict({ 'friendly_name': 'Mower is paused', diff --git a/tests/components/kitchen_sink/test_init.py b/tests/components/kitchen_sink/test_init.py index 0575141bb3b..b832577a48a 100644 --- a/tests/components/kitchen_sink/test_init.py +++ 
b/tests/components/kitchen_sink/test_init.py @@ -5,6 +5,7 @@ from http import HTTPStatus from unittest.mock import ANY import pytest +import voluptuous as vol from homeassistant.components.kitchen_sink import DOMAIN from homeassistant.components.recorder import get_instance @@ -324,3 +325,24 @@ async def test_issues_created( }, ] } + + +async def test_service( + hass: HomeAssistant, +) -> None: + """Test we can call the service.""" + assert await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) + + with pytest.raises(vol.error.MultipleInvalid): + await hass.services.async_call(DOMAIN, "test_service_1", blocking=True) + + await hass.services.async_call( + DOMAIN, "test_service_1", {"field_1": 1, "field_2": "auto"}, blocking=True + ) + + await hass.services.async_call( + DOMAIN, + "test_service_1", + {"field_1": 1, "field_2": "auto", "field_3": 1, "field_4": "forwards"}, + blocking=True, + ) diff --git a/tests/components/kitchen_sink/test_lawn_mower.py b/tests/components/kitchen_sink/test_lawn_mower.py index 48914ab5a46..e1ba201a722 100644 --- a/tests/components/kitchen_sink/test_lawn_mower.py +++ b/tests/components/kitchen_sink/test_lawn_mower.py @@ -72,6 +72,12 @@ async def test_states(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: LawnMowerActivity.MOWING, LawnMowerActivity.DOCKED, ), + ( + "lawn_mower.mower_can_return", + SERVICE_DOCK, + LawnMowerActivity.RETURNING, + LawnMowerActivity.DOCKED, + ), ], ) async def test_mower( diff --git a/tests/components/knx/test_climate.py b/tests/components/knx/test_climate.py index 9f198b48bd4..ec0498dc447 100644 --- a/tests/components/knx/test_climate.py +++ b/tests/components/knx/test_climate.py @@ -2,7 +2,7 @@ import pytest -from homeassistant.components.climate import PRESET_ECO, PRESET_SLEEP, HVACMode +from homeassistant.components.climate import HVACMode from homeassistant.components.knx.schema import ClimateSchema from homeassistant.const import CONF_NAME, STATE_IDLE from homeassistant.core import HomeAssistant @@ -331,7 +331,6 @@ async def test_climate_preset_mode( } } ) - events = async_capture_events(hass, "state_changed") # StateUpdater initialize state # StateUpdater semaphore allows 2 concurrent requests @@ -340,30 +339,28 @@ async def test_climate_preset_mode( await knx.receive_response("1/2/3", RAW_FLOAT_21_0) await knx.receive_response("1/2/5", RAW_FLOAT_22_0) await knx.assert_read("1/2/7") - await knx.receive_response("1/2/7", (0x01,)) - events.clear() + await knx.receive_response("1/2/7", (0x01,)) # comfort + knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="comfort") # set preset mode await hass.services.async_call( "climate", "set_preset_mode", - {"entity_id": "climate.test", "preset_mode": PRESET_ECO}, + {"entity_id": "climate.test", "preset_mode": "building_protection"}, blocking=True, ) await knx.assert_write("1/2/6", (0x04,)) - assert len(events) == 1 - events.pop() + knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="building_protection") # set preset mode await hass.services.async_call( "climate", "set_preset_mode", - {"entity_id": "climate.test", "preset_mode": PRESET_SLEEP}, + {"entity_id": "climate.test", "preset_mode": "economy"}, blocking=True, ) await knx.assert_write("1/2/6", (0x03,)) - assert len(events) == 1 - events.pop() + knx.assert_state("climate.test", HVACMode.HEAT, preset_mode="economy") assert len(knx.xknx.devices) == 2 assert len(knx.xknx.devices[0].device_updated_cbs) == 2 diff --git a/tests/components/knx/test_config_flow.py b/tests/components/knx/test_config_flow.py index 
a7da2d26600..78751c7e641 100644 --- a/tests/components/knx/test_config_flow.py +++ b/tests/components/knx/test_config_flow.py @@ -1,7 +1,7 @@ """Test the KNX config flow.""" from contextlib import contextmanager -from unittest.mock import Mock, patch +from unittest.mock import MagicMock, Mock, patch import pytest from xknx.exceptions.exception import CommunicationError, InvalidSecureConfiguration @@ -126,7 +126,7 @@ def _gateway_descriptor( class GatewayScannerMock: """Mock GatewayScanner.""" - def __init__(self, gateways=None): + def __init__(self, gateways=None) -> None: """Initialize GatewayScannerMock.""" # Key is a HPAI instance in xknx, but not used in HA anyway. self.found_gateways = ( @@ -510,7 +510,7 @@ async def test_routing_secure_keyfile( return_value=GatewayScannerMock(), ) async def test_tunneling_setup_manual( - _gateway_scanner_mock, + gateway_scanner_mock: MagicMock, hass: HomeAssistant, knx_setup, user_input, @@ -559,7 +559,7 @@ async def test_tunneling_setup_manual( return_value=GatewayScannerMock(), ) async def test_tunneling_setup_manual_request_description_error( - _gateway_scanner_mock, + gateway_scanner_mock: MagicMock, hass: HomeAssistant, knx_setup, ) -> None: @@ -700,7 +700,10 @@ async def test_tunneling_setup_manual_request_description_error( return_value=_gateway_descriptor("192.168.0.2", 3675), ) async def test_tunneling_setup_for_local_ip( - _request_description_mock, _gateway_scanner_mock, hass: HomeAssistant, knx_setup + request_description_mock: MagicMock, + gateway_scanner_mock: MagicMock, + hass: HomeAssistant, + knx_setup, ) -> None: """Test tunneling if only one gateway is found.""" result = await hass.config_entries.flow.async_init( @@ -962,7 +965,7 @@ async def _get_menu_step_secure_tunnel(hass: HomeAssistant) -> FlowResult: ), ) async def test_get_secure_menu_step_manual_tunnelling( - _request_description_mock, + request_description_mock: MagicMock, hass: HomeAssistant, ) -> None: """Test flow reaches secure_tunnellinn menu step from manual tunnelling configuration.""" diff --git a/tests/components/knx/test_expose.py b/tests/components/knx/test_expose.py index c4d0acf0ce2..0fd790a3e33 100644 --- a/tests/components/knx/test_expose.py +++ b/tests/components/knx/test_expose.py @@ -108,6 +108,11 @@ async def test_expose_attribute(hass: HomeAssistant, knx: KNXTestKit) -> None: await hass.async_block_till_done() await knx.assert_telegram_count(0) + # Ignore "unavailable" state + hass.states.async_set(entity_id, "unavailable", {attribute: None}) + await hass.async_block_till_done() + await knx.assert_telegram_count(0) + async def test_expose_attribute_with_default( hass: HomeAssistant, knx: KNXTestKit @@ -131,7 +136,7 @@ async def test_expose_attribute_with_default( await knx.receive_read("1/1/8") await knx.assert_response("1/1/8", (0,)) - # Change state to "on"; no attribute + # Change state to "on"; no attribute -> default hass.states.async_set(entity_id, "on", {}) await hass.async_block_till_done() await knx.assert_write("1/1/8", (0,)) @@ -146,6 +151,11 @@ async def test_expose_attribute_with_default( await hass.async_block_till_done() await knx.assert_no_telegram() + # Use default for "unavailable" state + hass.states.async_set(entity_id, "unavailable") + await hass.async_block_till_done() + await knx.assert_write("1/1/8", (0,)) + # Change state and attribute hass.states.async_set(entity_id, "on", {attribute: 3}) await hass.async_block_till_done() @@ -290,8 +300,18 @@ async def test_expose_value_template( assert "Error rendering value template for KNX 
expose" in caplog.text +@pytest.mark.parametrize( + "invalid_attribute", + [ + 101.0, + "invalid", # can't cast to float + ], +) async def test_expose_conversion_exception( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, knx: KNXTestKit + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + knx: KNXTestKit, + invalid_attribute: str, ) -> None: """Test expose throws exception.""" @@ -313,16 +333,17 @@ async def test_expose_conversion_exception( await knx.receive_read("1/1/8") await knx.assert_response("1/1/8", (3,)) + caplog.clear() # Change attribute: Expect no exception hass.states.async_set( entity_id, "on", - {attribute: 101}, + {attribute: invalid_attribute}, ) await hass.async_block_till_done() await knx.assert_no_telegram() assert ( - 'Could not expose fake.entity fake_attribute value "101.0" to KNX:' + f'Could not expose fake.entity fake_attribute value "{invalid_attribute}" to KNX:' in caplog.text ) diff --git a/tests/components/knx/test_knx_selectors.py b/tests/components/knx/test_knx_selectors.py index 432a0fb9f80..7b2f09af84b 100644 --- a/tests/components/knx/test_knx_selectors.py +++ b/tests/components/knx/test_knx_selectors.py @@ -1,5 +1,7 @@ """Test KNX selectors.""" +from typing import Any + import pytest import voluptuous as vol @@ -111,7 +113,11 @@ INVALID = "invalid" ), ], ) -def test_ga_selector(selector_config, data, expected): +def test_ga_selector( + selector_config: dict[str, Any], + data: dict[str, Any], + expected: str | dict[str, Any], +) -> None: """Test GASelector.""" selector = GASelector(**selector_config) if expected == INVALID: diff --git a/tests/components/knx/test_telegrams.py b/tests/components/knx/test_telegrams.py index 2eda718f5ac..69e3208879c 100644 --- a/tests/components/knx/test_telegrams.py +++ b/tests/components/knx/test_telegrams.py @@ -39,7 +39,7 @@ MOCK_TELEGRAMS = [ "dpt_name": None, "payload": [1, 2, 3, 4], "source": "0.0.0", - "source_name": "", + "source_name": "Home Assistant", "telegramtype": "GroupValueWrite", "timestamp": MOCK_TIMESTAMP, "unit": None, diff --git a/tests/components/knx/test_websocket.py b/tests/components/knx/test_websocket.py index 309ea111709..e747b0daade 100644 --- a/tests/components/knx/test_websocket.py +++ b/tests/components/knx/test_websocket.py @@ -3,6 +3,8 @@ from typing import Any from unittest.mock import patch +import pytest + from homeassistant.components.knx import DOMAIN, KNX_ADDRESS, SwitchSchema from homeassistant.components.knx.project import STORAGE_KEY as KNX_PROJECT_STORAGE_KEY from homeassistant.const import CONF_NAME @@ -355,3 +357,28 @@ async def test_knx_subscribe_telegrams_command_project( ) assert res["event"]["direction"] == "Incoming" assert res["event"]["timestamp"] is not None + + +@pytest.mark.parametrize( + "endpoint", + [ + "knx/info", # sync ws-command + "knx/get_knx_project", # async ws-command + ], +) +async def test_websocket_when_config_entry_unloaded( + hass: HomeAssistant, + knx: KNXTestKit, + hass_ws_client: WebSocketGenerator, + endpoint: str, +) -> None: + """Test websocket connection when config entry is unloaded.""" + await knx.setup_integration({}) + await hass.config_entries.async_unload(knx.mock_config_entry.entry_id) + client = await hass_ws_client(hass) + + await client.send_json_auto_id({"type": endpoint}) + res = await client.receive_json() + assert not res["success"] + assert res["error"]["code"] == "home_assistant_error" + assert res["error"]["message"] == "KNX integration not loaded." 
diff --git a/tests/components/kodi/util.py b/tests/components/kodi/util.py index 6217a77903b..e56ba03b7e5 100644 --- a/tests/components/kodi/util.py +++ b/tests/components/kodi/util.py @@ -63,7 +63,7 @@ def get_kodi_connection( class MockConnection: """A mock kodi connection.""" - def __init__(self, connected=True): + def __init__(self, connected=True) -> None: """Mock the Kodi connection.""" self._connected = connected @@ -92,7 +92,7 @@ class MockConnection: class MockWSConnection: """A mock kodi websocket connection.""" - def __init__(self, connected=True): + def __init__(self, connected=True) -> None: """Mock the websocket connection.""" self._connected = connected diff --git a/tests/components/konnected/test_panel.py b/tests/components/konnected/test_panel.py index 64cc414cdd3..48ebea64161 100644 --- a/tests/components/konnected/test_panel.py +++ b/tests/components/konnected/test_panel.py @@ -700,4 +700,4 @@ async def test_connect_retry(hass: HomeAssistant, mock_panel) -> None: async_fire_time_changed(hass, utcnow() + timedelta(seconds=21)) await hass.async_block_till_done() await async_update_entity(hass, "switch.konnected_445566_actuator_6") - assert hass.states.get("switch.konnected_445566_actuator_6").state == "off" + assert hass.states.get("switch.konnected_445566_actuator_6").state == "unknown" diff --git a/tests/components/lacrosse_view/test_config_flow.py b/tests/components/lacrosse_view/test_config_flow.py index 5a48b3d15fe..9ca7fb78bdd 100644 --- a/tests/components/lacrosse_view/test_config_flow.py +++ b/tests/components/lacrosse_view/test_config_flow.py @@ -251,16 +251,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": mock_config_entry.title}, - "unique_id": mock_config_entry.unique_id, - }, - data=data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/lamarzocco/test_config_flow.py b/tests/components/lamarzocco/test_config_flow.py index 92ecd0a13f4..39896926c61 100644 --- a/tests/components/lamarzocco/test_config_flow.py +++ b/tests/components/lamarzocco/test_config_flow.py @@ -7,12 +7,7 @@ from lmcloud.models import LaMarzoccoDeviceInfo from homeassistant.components.lamarzocco.config_flow import CONF_MACHINE from homeassistant.components.lamarzocco.const import CONF_USE_BLUETOOTH, DOMAIN -from homeassistant.config_entries import ( - SOURCE_BLUETOOTH, - SOURCE_REAUTH, - SOURCE_USER, - ConfigEntryState, -) +from homeassistant.config_entries import SOURCE_BLUETOOTH, SOURCE_USER, ConfigEntryState from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -247,15 +242,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/lametric/test_config_flow.py b/tests/components/lametric/test_config_flow.py index 2a21423ad03..3fbe606c7f1 100644 --- a/tests/components/lametric/test_config_flow.py +++ 
b/tests/components/lametric/test_config_flow.py @@ -20,12 +20,7 @@ from homeassistant.components.ssdp import ( ATTR_UPNP_SERIAL, SsdpServiceInfo, ) -from homeassistant.config_entries import ( - SOURCE_DHCP, - SOURCE_REAUTH, - SOURCE_SSDP, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_SSDP, SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_DEVICE, CONF_HOST, CONF_MAC from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -753,15 +748,7 @@ async def test_reauth_cloud_import( """Test reauth flow importing api keys from the cloud.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) flow_id = result["flow_id"] @@ -817,15 +804,7 @@ async def test_reauth_cloud_abort_device_not_found( mock_config_entry.add_to_hass(hass) hass.config_entries.async_update_entry(mock_config_entry, unique_id="UKNOWN_DEVICE") - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) flow_id = result["flow_id"] @@ -872,15 +851,7 @@ async def test_reauth_manual( """Test reauth flow with manual entry.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) flow_id = result["flow_id"] @@ -914,15 +885,7 @@ async def test_reauth_manual_sky( """Test reauth flow with manual entry for LaMetric Sky.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) flow_id = result["flow_id"] diff --git a/tests/components/landisgyr_heat_meter/test_init.py b/tests/components/landisgyr_heat_meter/test_init.py index c9768ec681f..76a376e441c 100644 --- a/tests/components/landisgyr_heat_meter/test_init.py +++ b/tests/components/landisgyr_heat_meter/test_init.py @@ -1,6 +1,6 @@ """Test the Landis + Gyr Heat Meter init.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch from homeassistant.components.landisgyr_heat_meter.const import ( DOMAIN as LANDISGYR_HEAT_METER_DOMAIN, @@ -17,7 +17,7 @@ API_HEAT_METER_SERVICE = ( @patch(API_HEAT_METER_SERVICE) -async def test_unload_entry(_, hass: HomeAssistant) -> None: +async def test_unload_entry(mock_meter_service: MagicMock, hass: HomeAssistant) -> None: """Test removing config entry.""" mock_entry_data = { "device": "/dev/USB0", @@ -41,7 +41,9 @@ async def test_unload_entry(_, hass: HomeAssistant) -> None: @patch(API_HEAT_METER_SERVICE) async def test_migrate_entry( - _, hass: HomeAssistant, entity_registry: er.EntityRegistry + mock_meter_service: MagicMock, + hass: HomeAssistant, + entity_registry: er.EntityRegistry, ) -> None: """Test successful 
migration of entry data from version 1 to 2.""" diff --git a/tests/components/laundrify/test_config_flow.py b/tests/components/laundrify/test_config_flow.py index 69a4b957cf5..8bb8211195c 100644 --- a/tests/components/laundrify/test_config_flow.py +++ b/tests/components/laundrify/test_config_flow.py @@ -3,7 +3,7 @@ from laundrify_aio import exceptions from homeassistant.components.laundrify.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CODE, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -95,9 +95,8 @@ async def test_form_unkown_exception( async def test_step_reauth(hass: HomeAssistant) -> None: """Test the reauth form is shown.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH} - ) + config_entry = create_entry(hass) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/lcn/conftest.py b/tests/components/lcn/conftest.py index 2884bc833c2..b1f28b28465 100644 --- a/tests/components/lcn/conftest.py +++ b/tests/components/lcn/conftest.py @@ -1,7 +1,9 @@ """Test configuration and mocks for LCN component.""" +from collections.abc import AsyncGenerator import json -from unittest.mock import AsyncMock, patch +from typing import Any +from unittest.mock import AsyncMock, Mock, patch import pypck from pypck.connection import PchkConnectionManager @@ -10,8 +12,9 @@ from pypck.module import GroupConnection, ModuleConnection import pytest from homeassistant.components.lcn.const import DOMAIN -from homeassistant.components.lcn.helpers import generate_unique_id -from homeassistant.const import CONF_HOST +from homeassistant.components.lcn.helpers import AddressType, generate_unique_id +from homeassistant.const import CONF_ADDRESS, CONF_DEVICES, CONF_ENTITIES, CONF_HOST +from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -27,7 +30,7 @@ class MockModuleConnection(ModuleConnection): request_name = AsyncMock(return_value="TestModule") send_command = AsyncMock(return_value=True) - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Construct ModuleConnection instance.""" super().__init__(*args, **kwargs) self.serials_request_handler.serial_known.set() @@ -42,13 +45,13 @@ class MockGroupConnection(GroupConnection): class MockPchkConnectionManager(PchkConnectionManager): """Fake connection handler.""" - async def async_connect(self, timeout=30): + async def async_connect(self, timeout: int = 30) -> None: """Mock establishing a connection to PCHK.""" self.authentication_completed_future.set_result(True) self.license_error_future.set_result(True) self.segment_scan_completed_event.set() - async def async_close(self): + async def async_close(self) -> None: """Mock closing a connection to PCHK.""" @patch.object(pypck.connection, "ModuleConnection", MockModuleConnection) @@ -57,13 +60,19 @@ class MockPchkConnectionManager(PchkConnectionManager): """Get LCN address connection.""" return super().get_address_conn(addr, request_serials) + scan_modules = AsyncMock() send_command = AsyncMock() -def create_config_entry(name): +def create_config_entry(name: str) -> MockConfigEntry: """Set up config 
entries with configuration data.""" fixture_filename = f"lcn/config_entry_{name}.json" entry_data = json.loads(load_fixture(fixture_filename)) + for device in entry_data[CONF_DEVICES]: + device[CONF_ADDRESS] = tuple(device[CONF_ADDRESS]) + for entity in entry_data[CONF_ENTITIES]: + entity[CONF_ADDRESS] = tuple(entity[CONF_ADDRESS]) + options = {} title = entry_data[CONF_HOST] @@ -78,20 +87,23 @@ def create_config_entry(name): @pytest.fixture(name="entry") -def create_config_entry_pchk(): +def create_config_entry_pchk() -> MockConfigEntry: """Return one specific config entry.""" return create_config_entry("pchk") @pytest.fixture(name="entry2") -def create_config_entry_myhome(): +def create_config_entry_myhome() -> MockConfigEntry: """Return one specific config entry.""" return create_config_entry("myhome") @pytest.fixture(name="lcn_connection") -async def init_integration(hass, entry): +async def init_integration( + hass: HomeAssistant, entry: MockConfigEntry +) -> AsyncGenerator[MockPchkConnectionManager]: """Set up the LCN integration in Home Assistant.""" + hass.http = Mock() # needs to be mocked as hass.http.register_static_path is called when registering the frontend lcn_connection = None def lcn_connection_factory(*args, **kwargs): @@ -109,7 +121,7 @@ async def init_integration(hass, entry): yield lcn_connection -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Set up the LCN component.""" fixture_filename = "lcn/config.json" config_data = json.loads(load_fixture(fixture_filename)) @@ -118,7 +130,9 @@ async def setup_component(hass): await hass.async_block_till_done() -def get_device(hass, entry, address): +def get_device( + hass: HomeAssistant, entry: MockConfigEntry, address: AddressType +) -> dr.DeviceEntry: """Get LCN device for specified address.""" device_registry = dr.async_get(hass) identifiers = {(DOMAIN, generate_unique_id(entry.entry_id, address))} diff --git a/tests/components/lcn/fixtures/config_entry_pchk.json b/tests/components/lcn/fixtures/config_entry_pchk.json index 31b51adfce7..08ccd194578 100644 --- a/tests/components/lcn/fixtures/config_entry_pchk.json +++ b/tests/components/lcn/fixtures/config_entry_pchk.json @@ -9,14 +9,14 @@ "devices": [ { "address": [0, 7, false], - "name": "", + "name": "TestModule", "hardware_serial": -1, "software_serial": -1, "hardware_type": -1 }, { "address": [0, 5, true], - "name": "", + "name": "TestGroup", "hardware_serial": -1, "software_serial": -1, "hardware_type": -1 diff --git a/tests/components/lcn/test_config_flow.py b/tests/components/lcn/test_config_flow.py index e1705e4b349..d002c5fe625 100644 --- a/tests/components/lcn/test_config_flow.py +++ b/tests/components/lcn/test_config_flow.py @@ -5,9 +5,11 @@ from unittest.mock import patch from pypck.connection import PchkAuthenticationError, PchkLicenseError import pytest -from homeassistant import config_entries +from homeassistant import config_entries, data_entry_flow +from homeassistant.components.lcn.config_flow import LcnFlowHandler, validate_connection from homeassistant.components.lcn.const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, DOMAIN from homeassistant.const import ( + CONF_BASE, CONF_DEVICES, CONF_ENTITIES, CONF_HOST, @@ -16,25 +18,33 @@ from homeassistant.const import ( CONF_PORT, CONF_USERNAME, ) -from homeassistant.core import HomeAssistant +from homeassistant.core import DOMAIN as HOMEASSISTANT_DOMAIN, HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import 
issue_registry as ir from tests.common import MockConfigEntry -IMPORT_DATA = { - CONF_HOST: "pchk", +CONFIG_DATA = { CONF_IP_ADDRESS: "127.0.0.1", - CONF_PORT: 4114, + CONF_PORT: 1234, CONF_USERNAME: "lcn", CONF_PASSWORD: "lcn", CONF_SK_NUM_TRIES: 0, CONF_DIM_MODE: "STEPS200", +} + +CONNECTION_DATA = {CONF_HOST: "pchk", **CONFIG_DATA} + +IMPORT_DATA = { + **CONNECTION_DATA, CONF_DEVICES: [], CONF_ENTITIES: [], } -async def test_step_import(hass: HomeAssistant) -> None: +async def test_step_import( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: """Test for import step.""" with ( @@ -46,14 +56,18 @@ async def test_step_import(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY assert result["title"] == "pchk" assert result["data"] == IMPORT_DATA + assert issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) -async def test_step_import_existing_host(hass: HomeAssistant) -> None: +async def test_step_import_existing_host( + hass: HomeAssistant, issue_registry: ir.IssueRegistry +) -> None: """Test for update of config_entry if imported host already exists.""" # Create config entry and add it to hass @@ -67,13 +81,15 @@ async def test_step_import_existing_host(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=imported_data ) - await hass.async_block_till_done() # Check if config entry was updated assert result["type"] is FlowResultType.ABORT assert result["reason"] == "existing_configuration_updated" assert mock_entry.source == config_entries.SOURCE_IMPORT assert mock_entry.data == IMPORT_DATA + assert issue_registry.async_get_issue( + HOMEASSISTANT_DOMAIN, f"deprecated_yaml_{DOMAIN}" + ) @pytest.mark.parametrize( @@ -81,10 +97,12 @@ async def test_step_import_existing_host(hass: HomeAssistant) -> None: [ (PchkAuthenticationError, "authentication_error"), (PchkLicenseError, "license_error"), - (TimeoutError, "connection_timeout"), + (TimeoutError, "connection_refused"), ], ) -async def test_step_import_error(hass: HomeAssistant, error, reason) -> None: +async def test_step_import_error( + hass: HomeAssistant, issue_registry: ir.IssueRegistry, error, reason +) -> None: """Test for error in import is handled correctly.""" with patch( "pypck.connection.PchkConnectionManager.async_connect", side_effect=error @@ -94,7 +112,155 @@ async def test_step_import_error(hass: HomeAssistant, error, reason) -> None: result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_IMPORT}, data=data ) - await hass.async_block_till_done() assert result["type"] is FlowResultType.ABORT assert result["reason"] == reason + assert issue_registry.async_get_issue(DOMAIN, reason) + + +async def test_show_form(hass: HomeAssistant) -> None: + """Test that the form is served with no input.""" + flow = LcnFlowHandler() + flow.hass = hass + + result = await flow.async_step_user(user_input=None) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["step_id"] == "user" + + +async def test_step_user(hass: HomeAssistant) -> None: + """Test for user step.""" + with ( + patch("pypck.connection.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.async_setup", return_value=True), + 
patch("homeassistant.components.lcn.async_setup_entry", return_value=True), + ): + data = CONNECTION_DATA.copy() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=data + ) + + assert result["type"] == data_entry_flow.FlowResultType.CREATE_ENTRY + assert result["title"] == CONNECTION_DATA[CONF_HOST] + assert result["data"] == { + **CONNECTION_DATA, + CONF_DEVICES: [], + CONF_ENTITIES: [], + } + + +async def test_step_user_existing_host( + hass: HomeAssistant, entry: MockConfigEntry +) -> None: + """Test for user defined host already exists.""" + entry.add_to_hass(hass) + + with patch("pypck.connection.PchkConnectionManager.async_connect"): + config_data = entry.data.copy() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=config_data + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["errors"] == {CONF_BASE: "already_configured"} + + +@pytest.mark.parametrize( + ("error", "errors"), + [ + (PchkAuthenticationError, {CONF_BASE: "authentication_error"}), + (PchkLicenseError, {CONF_BASE: "license_error"}), + (TimeoutError, {CONF_BASE: "connection_refused"}), + ], +) +async def test_step_user_error( + hass: HomeAssistant, error: type[Exception], errors: dict[str, str] +) -> None: + """Test for error in user step is handled correctly.""" + with patch( + "pypck.connection.PchkConnectionManager.async_connect", side_effect=error + ): + data = CONNECTION_DATA.copy() + data.update({CONF_HOST: "pchk"}) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER}, data=data + ) + + assert result["type"] == data_entry_flow.FlowResultType.FORM + assert result["errors"] == errors + + +async def test_step_reconfigure(hass: HomeAssistant, entry: MockConfigEntry) -> None: + """Test for reconfigure step.""" + entry.add_to_hass(hass) + old_entry_data = entry.data.copy() + + with ( + patch("pypck.connection.PchkConnectionManager.async_connect"), + patch("homeassistant.components.lcn.async_setup", return_value=True), + patch("homeassistant.components.lcn.async_setup_entry", return_value=True), + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=CONFIG_DATA.copy(), + ) + + assert result["type"] == data_entry_flow.FlowResultType.ABORT + assert result["reason"] == "reconfigure_successful" + + entry = hass.config_entries.async_get_entry(entry.entry_id) + assert entry.title == CONNECTION_DATA[CONF_HOST] + assert entry.data == {**old_entry_data, **CONFIG_DATA} + + +@pytest.mark.parametrize( + ("error", "errors"), + [ + (PchkAuthenticationError, {CONF_BASE: "authentication_error"}), + (PchkLicenseError, {CONF_BASE: "license_error"}), + (TimeoutError, {CONF_BASE: "connection_refused"}), + ], +) +async def test_step_reconfigure_error( + hass: HomeAssistant, + entry: MockConfigEntry, + error: type[Exception], + errors: dict[str, str], +) -> None: + """Test for error in reconfigure step is handled correctly.""" + entry.add_to_hass(hass) + with patch( + "pypck.connection.PchkConnectionManager.async_connect", side_effect=error + ): + data = {**CONNECTION_DATA, CONF_HOST: "pchk"} + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={ + "source": config_entries.SOURCE_RECONFIGURE, + "entry_id": entry.entry_id, + }, + data=data, + ) + + assert result["type"] == 
data_entry_flow.FlowResultType.FORM + assert result["errors"] == errors + + +async def test_validate_connection() -> None: + """Test the connection validation.""" + data = CONNECTION_DATA.copy() + + with ( + patch("pypck.connection.PchkConnectionManager.async_connect") as async_connect, + patch("pypck.connection.PchkConnectionManager.async_close") as async_close, + ): + result = await validate_connection(data=data) + + assert async_connect.called + assert async_close.called + assert result is None diff --git a/tests/components/lcn/test_init.py b/tests/components/lcn/test_init.py index 670735439ce..c118b98ecef 100644 --- a/tests/components/lcn/test_init.py +++ b/tests/components/lcn/test_init.py @@ -1,6 +1,6 @@ """Test init of LCN integration.""" -from unittest.mock import patch +from unittest.mock import Mock, patch from pypck.connection import ( PchkAuthenticationError, @@ -31,6 +31,7 @@ async def test_async_setup_entry(hass: HomeAssistant, entry, lcn_connection) -> async def test_async_setup_multiple_entries(hass: HomeAssistant, entry, entry2) -> None: """Test a successful setup and unload of multiple entries.""" + hass.http = Mock() with patch("pypck.connection.PchkConnectionManager", MockPchkConnectionManager): for config_entry in (entry, entry2): config_entry.add_to_hass(hass) diff --git a/tests/components/lcn/test_websocket.py b/tests/components/lcn/test_websocket.py new file mode 100644 index 00000000000..f1f0a19b572 --- /dev/null +++ b/tests/components/lcn/test_websocket.py @@ -0,0 +1,303 @@ +"""LCN Websocket Tests.""" + +from pypck.lcn_addr import LcnAddr +import pytest + +from homeassistant.components.lcn.const import CONF_DOMAIN_DATA +from homeassistant.components.lcn.helpers import get_device_config, get_resource +from homeassistant.const import ( + CONF_ADDRESS, + CONF_DEVICES, + CONF_DOMAIN, + CONF_ENTITIES, + CONF_NAME, + CONF_RESOURCE, + CONF_TYPE, +) +from homeassistant.core import HomeAssistant + +from tests.typing import WebSocketGenerator + +DEVICES_PAYLOAD = {CONF_TYPE: "lcn/devices", "entry_id": ""} +ENTITIES_PAYLOAD = { + CONF_TYPE: "lcn/entities", + "entry_id": "", +} +SCAN_PAYLOAD = {CONF_TYPE: "lcn/devices/scan", "entry_id": ""} +DEVICES_ADD_PAYLOAD = { + CONF_TYPE: "lcn/devices/add", + "entry_id": "", + CONF_ADDRESS: (0, 10, False), +} +DEVICES_DELETE_PAYLOAD = { + CONF_TYPE: "lcn/devices/delete", + "entry_id": "", + CONF_ADDRESS: (0, 7, False), +} +ENTITIES_ADD_PAYLOAD = { + CONF_TYPE: "lcn/entities/add", + "entry_id": "", + CONF_ADDRESS: (0, 7, False), + CONF_NAME: "test_switch", + CONF_DOMAIN: "switch", + CONF_DOMAIN_DATA: {"output": "RELAY5"}, +} +ENTITIES_DELETE_PAYLOAD = { + CONF_TYPE: "lcn/entities/delete", + "entry_id": "", + CONF_ADDRESS: (0, 7, False), + CONF_DOMAIN: "switch", + CONF_RESOURCE: "relay1", +} + + +async def test_lcn_devices_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/devices command.""" + client = await hass_ws_client(hass) + + await client.send_json_auto_id({**DEVICES_PAYLOAD, "entry_id": entry.entry_id}) + + res = await client.receive_json() + assert res["success"], res + assert len(res["result"]) == len(entry.data[CONF_DEVICES]) + assert all( + {**result, CONF_ADDRESS: tuple(result[CONF_ADDRESS])} + in entry.data[CONF_DEVICES] + for result in res["result"] + ) + + +@pytest.mark.parametrize( + "payload", + [ + ENTITIES_PAYLOAD, + {**ENTITIES_PAYLOAD, CONF_ADDRESS: (0, 7, False)}, + ], +) +async def test_lcn_entities_command( + hass: HomeAssistant, + 
hass_ws_client: WebSocketGenerator, + entry, + lcn_connection, + payload, +) -> None: + """Test lcn/entities command.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + **payload, + "entry_id": entry.entry_id, + } + ) + + res = await client.receive_json() + assert res["success"], res + entities = [ + entity + for entity in entry.data[CONF_ENTITIES] + if CONF_ADDRESS not in payload or entity[CONF_ADDRESS] == payload[CONF_ADDRESS] + ] + assert len(res["result"]) == len(entities) + assert all( + {**result, CONF_ADDRESS: tuple(result[CONF_ADDRESS])} in entities + for result in res["result"] + ) + + +async def test_lcn_devices_scan_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/devices/scan command.""" + # add new module which is not stored in config_entry + lcn_connection.get_address_conn(LcnAddr(0, 10, False)) + + client = await hass_ws_client(hass) + await client.send_json_auto_id({**SCAN_PAYLOAD, "entry_id": entry.entry_id}) + + res = await client.receive_json() + assert res["success"], res + + lcn_connection.scan_modules.assert_awaited() + assert len(res["result"]) == len(entry.data[CONF_DEVICES]) + assert all( + {**result, CONF_ADDRESS: tuple(result[CONF_ADDRESS])} + in entry.data[CONF_DEVICES] + for result in res["result"] + ) + + +async def test_lcn_devices_add_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/devices/add command.""" + client = await hass_ws_client(hass) + assert get_device_config((0, 10, False), entry) is None + + await client.send_json_auto_id({**DEVICES_ADD_PAYLOAD, "entry_id": entry.entry_id}) + + res = await client.receive_json() + assert res["success"], res + + assert get_device_config((0, 10, False), entry) + + +async def test_lcn_devices_delete_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/devices/delete command.""" + client = await hass_ws_client(hass) + assert get_device_config((0, 7, False), entry) + + await client.send_json_auto_id( + {**DEVICES_DELETE_PAYLOAD, "entry_id": entry.entry_id} + ) + + res = await client.receive_json() + assert res["success"], res + assert get_device_config((0, 7, False), entry) is None + + +async def test_lcn_entities_add_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/entities/add command.""" + client = await hass_ws_client(hass) + + entity_config = { + key: ENTITIES_ADD_PAYLOAD[key] + for key in (CONF_ADDRESS, CONF_NAME, CONF_DOMAIN, CONF_DOMAIN_DATA) + } + + resource = get_resource( + ENTITIES_ADD_PAYLOAD[CONF_DOMAIN], ENTITIES_ADD_PAYLOAD[CONF_DOMAIN_DATA] + ).lower() + + assert {**entity_config, CONF_RESOURCE: resource} not in entry.data[CONF_ENTITIES] + + await client.send_json_auto_id({**ENTITIES_ADD_PAYLOAD, "entry_id": entry.entry_id}) + + res = await client.receive_json() + assert res["success"], res + + assert {**entity_config, CONF_RESOURCE: resource} in entry.data[CONF_ENTITIES] + + +async def test_lcn_entities_delete_command( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator, entry, lcn_connection +) -> None: + """Test lcn/entities/delete command.""" + client = await hass_ws_client(hass) + + assert ( + len( + [ + entity + for entity in entry.data[CONF_ENTITIES] + if entity[CONF_ADDRESS] == ENTITIES_DELETE_PAYLOAD[CONF_ADDRESS] + and entity[CONF_DOMAIN] == ENTITIES_DELETE_PAYLOAD[CONF_DOMAIN] + and 
entity[CONF_RESOURCE] == ENTITIES_DELETE_PAYLOAD[CONF_RESOURCE] + ] + ) + == 1 + ) + + await client.send_json_auto_id( + {**ENTITIES_DELETE_PAYLOAD, "entry_id": entry.entry_id} + ) + + res = await client.receive_json() + assert res["success"], res + + assert ( + len( + [ + entity + for entity in entry.data[CONF_ENTITIES] + if entity[CONF_ADDRESS] == ENTITIES_DELETE_PAYLOAD[CONF_ADDRESS] + and entity[CONF_DOMAIN] == ENTITIES_DELETE_PAYLOAD[CONF_DOMAIN] + and entity[CONF_RESOURCE] == ENTITIES_DELETE_PAYLOAD[CONF_RESOURCE] + ] + ) + == 0 + ) + + +@pytest.mark.parametrize( + ("payload", "entity_id", "result"), + [ + (DEVICES_PAYLOAD, "12345", False), + (ENTITIES_PAYLOAD, "12345", False), + (SCAN_PAYLOAD, "12345", False), + (DEVICES_ADD_PAYLOAD, "12345", False), + (DEVICES_DELETE_PAYLOAD, "12345", False), + (ENTITIES_ADD_PAYLOAD, "12345", False), + (ENTITIES_DELETE_PAYLOAD, "12345", False), + ], +) +async def test_lcn_command_host_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + lcn_connection, + payload, + entity_id, + result, +) -> None: + """Test lcn commands for unknown host.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id({**payload, "entry_id": entity_id}) + + res = await client.receive_json() + assert res["success"], res + assert res["result"] == result + + +@pytest.mark.parametrize( + ("payload", "address", "result"), + [ + (DEVICES_ADD_PAYLOAD, (0, 7, False), False), # device already existing + (DEVICES_DELETE_PAYLOAD, (0, 42, False), False), + (ENTITIES_ADD_PAYLOAD, (0, 42, False), False), + (ENTITIES_DELETE_PAYLOAD, (0, 42, 0), False), + ], +) +async def test_lcn_command_address_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entry, + lcn_connection, + payload, + address, + result, +) -> None: + """Test lcn commands for address error.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + {**payload, "entry_id": entry.entry_id, CONF_ADDRESS: address} + ) + + res = await client.receive_json() + assert res["success"], res + assert res["result"] == result + + +async def test_lcn_entities_add_existing_error( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + entry, + lcn_connection, +) -> None: + """Test lcn/entities/add command for an already existing entity.""" + client = await hass_ws_client(hass) + await client.send_json_auto_id( + { + **ENTITIES_ADD_PAYLOAD, + "entry_id": entry.entry_id, + CONF_DOMAIN_DATA: {"output": "RELAY1"}, + } + ) + + res = await client.receive_json() + assert res["success"], res + assert res["result"] is False diff --git a/tests/components/lg_netcast/__init__.py b/tests/components/lg_netcast/__init__.py index ce3e09aeb65..6e608ae207b 100644 --- a/tests/components/lg_netcast/__init__.py +++ b/tests/components/lg_netcast/__init__.py @@ -1,7 +1,7 @@ """Tests for LG Netcast TV.""" from unittest.mock import patch -from xml.etree import ElementTree +import xml.etree.ElementTree as ET from pylgnetcast import AccessTokenError, LgNetCastClient, SessionIdError import requests @@ -56,7 +56,7 @@ def _patched_lgnetcast_client( if always_404: return None if invalid_details: - raise ElementTree.ParseError("Mocked Parsed Error") + raise ET.ParseError("Mocked Parsed Error") return { "uuid": UNIQUE_ID if not no_unique_id else None, "model_name": MODEL_NAME, diff --git a/tests/components/lg_soundbar/test_config_flow.py b/tests/components/lg_soundbar/test_config_flow.py index 806c993e792..01e16ecb8d0 100644 --- a/tests/components/lg_soundbar/test_config_flow.py +++ 
b/tests/components/lg_soundbar/test_config_flow.py @@ -5,7 +5,7 @@ from __future__ import annotations from collections.abc import Callable import socket from typing import Any -from unittest.mock import DEFAULT, patch +from unittest.mock import DEFAULT, MagicMock, patch from homeassistant import config_entries from homeassistant.components.lg_soundbar.const import DEFAULT_PORT, DOMAIN @@ -17,8 +17,12 @@ from tests.common import MockConfigEntry def setup_mock_temescal( - hass, mock_temescal, mac_info_dev=None, product_info=None, info=None -): + hass: HomeAssistant, + mock_temescal: MagicMock, + mac_info_dev: dict[str, Any] | None = None, + product_info: dict[str, Any] | None = None, + info: dict[str, Any] | None = None, +) -> None: """Set up a mock of the temescal object to craft our expected responses.""" tmock = mock_temescal.temescal instance = tmock.return_value diff --git a/tests/components/lidarr/test_config_flow.py b/tests/components/lidarr/test_config_flow.py index e44b03cd2a2..0097e66fe24 100644 --- a/tests/components/lidarr/test_config_flow.py +++ b/tests/components/lidarr/test_config_flow.py @@ -1,13 +1,15 @@ """Test Lidarr config flow.""" from homeassistant.components.lidarr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import CONF_DATA, MOCK_INPUT, ComponentSetup +from tests.common import MockConfigEntry + async def test_flow_user_form(hass: HomeAssistant, connection) -> None: """Test that the user set up form is served.""" @@ -95,20 +97,14 @@ async def test_flow_user_unknown_error(hass: HomeAssistant, unknown) -> None: async def test_flow_reauth( - hass: HomeAssistant, setup_integration: ComponentSetup, connection + hass: HomeAssistant, + setup_integration: ComponentSetup, + connection, + config_entry: MockConfigEntry, ) -> None: """Test reauth.""" await setup_integration() - entry = hass.config_entries.async_entries(DOMAIN)[0] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure( @@ -123,4 +119,4 @@ async def test_flow_reauth( ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" - assert entry.data[CONF_API_KEY] == "abc123" + assert config_entry.data[CONF_API_KEY] == "abc123" diff --git a/tests/components/lifx/__init__.py b/tests/components/lifx/__init__.py index 4834e486ec0..432e7673db6 100644 --- a/tests/components/lifx/__init__.py +++ b/tests/components/lifx/__init__.py @@ -4,6 +4,7 @@ from __future__ import annotations import asyncio from contextlib import contextmanager +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch from aiolifx.aiolifx import Light @@ -25,7 +26,7 @@ DEFAULT_ENTRY_TITLE = LABEL class MockMessage: """Mock a lifx message.""" - def __init__(self, **kwargs): + def __init__(self, **kwargs: Any) -> None: """Init message.""" self.target_addr = SERIAL self.count = 9 @@ -37,7 +38,7 @@ class MockMessage: class MockFailingLifxCommand: """Mock a lifx command that fails.""" 
- def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.calls = [] @@ -60,7 +61,7 @@ class MockLifxCommand: """Return name.""" return "mock_lifx_command" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.calls = [] @@ -212,7 +213,7 @@ def _patch_device(device: Light | None = None, no_device: bool = False): class MockLifxConnecton: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" if no_device: self.device = _mocked_failing_bulb() @@ -240,7 +241,7 @@ def _patch_discovery(device: Light | None = None, no_device: bool = False): class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init discovery.""" if no_device: self.lights = {} @@ -276,7 +277,7 @@ def _patch_config_flow_try_connect( class MockLifxConnection: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" if no_device: self.device = _mocked_failing_bulb() diff --git a/tests/components/lifx/conftest.py b/tests/components/lifx/conftest.py index 5cb7c702f43..e4a5f303f61 100644 --- a/tests/components/lifx/conftest.py +++ b/tests/components/lifx/conftest.py @@ -1,5 +1,6 @@ """Tests for the lifx integration.""" +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -21,7 +22,7 @@ def mock_effect_conductor(): """Mock the effect conductor.""" class MockConductor: - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock the conductor.""" self.start = AsyncMock() self.stop = AsyncMock() diff --git a/tests/components/lifx/test_config_flow.py b/tests/components/lifx/test_config_flow.py index 59b7090788a..29324d0d19a 100644 --- a/tests/components/lifx/test_config_flow.py +++ b/tests/components/lifx/test_config_flow.py @@ -2,6 +2,7 @@ from ipaddress import ip_address import socket +from typing import Any from unittest.mock import patch import pytest @@ -288,7 +289,7 @@ async def test_manual_dns_error(hass: HomeAssistant) -> None: class MockLifxConnectonDnsError: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" self.device = _mocked_failing_bulb() @@ -574,7 +575,7 @@ async def test_suggested_area( class MockLifxCommandGetGroup: """Mock the get_group method that gets the group name from the bulb.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.lifx_group = kwargs.get("lifx_group") diff --git a/tests/components/lifx/test_init.py b/tests/components/lifx/test_init.py index 42ece68a2c5..66adc54704e 100644 --- a/tests/components/lifx/test_init.py +++ b/tests/components/lifx/test_init.py @@ -4,6 +4,7 @@ from __future__ import annotations from datetime import timedelta import socket +from typing import Any from unittest.mock import patch import pytest @@ -37,7 +38,7 @@ async def test_configuring_lifx_causes_discovery(hass: HomeAssistant) -> None: class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init discovery.""" discovered = _mocked_bulb() self.lights 
= {discovered.mac_addr: discovered} @@ -137,7 +138,7 @@ async def test_dns_error_at_startup(hass: HomeAssistant) -> None: class MockLifxConnectonDnsError: """Mock lifx connection with a dns error.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init connection.""" self.device = bulb diff --git a/tests/components/lifx/test_light.py b/tests/components/lifx/test_light.py index 9972bc1021a..a642347b4e6 100644 --- a/tests/components/lifx/test_light.py +++ b/tests/components/lifx/test_light.py @@ -1,6 +1,7 @@ """Tests for the lifx integration light platform.""" from datetime import timedelta +from typing import Any from unittest.mock import patch import aiolifx_effects @@ -1299,7 +1300,7 @@ async def test_config_zoned_light_strip_fails( class MockFailingLifxCommand: """Mock a lifx command that fails on the 2nd try.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.call_count = 0 @@ -1338,7 +1339,7 @@ async def test_legacy_zoned_light_strip( class MockPopulateLifxZonesCommand: """Mock populating the number of zones.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.call_count = 0 @@ -1845,7 +1846,7 @@ async def test_color_bulb_is_actually_off(hass: HomeAssistant) -> None: class MockLifxCommandActuallyOff: """Mock a lifx command that will update our power level state.""" - def __init__(self, bulb, **kwargs): + def __init__(self, bulb, **kwargs: Any) -> None: """Init command.""" self.bulb = bulb self.calls = [] diff --git a/tests/components/lifx/test_migration.py b/tests/components/lifx/test_migration.py index e5b2f9f8167..f984acce238 100644 --- a/tests/components/lifx/test_migration.py +++ b/tests/components/lifx/test_migration.py @@ -3,6 +3,7 @@ from __future__ import annotations from datetime import timedelta +from typing import Any from unittest.mock import patch from homeassistant import setup @@ -114,7 +115,7 @@ async def test_discovery_is_more_frequent_during_migration( class MockLifxDiscovery: """Mock lifx discovery.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Init discovery.""" self.bulb = bulb self.lights = {} diff --git a/tests/components/light/common.py b/tests/components/light/common.py index 4c3e95b5ef9..0ad492a31e9 100644 --- a/tests/components/light/common.py +++ b/tests/components/light/common.py @@ -33,6 +33,7 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass from tests.common import MockToggleEntity @@ -40,24 +41,24 @@ from tests.common import MockToggleEntity @bind_hass def turn_on( - hass, - entity_id=ENTITY_MATCH_ALL, - transition=None, - brightness=None, - brightness_pct=None, - rgb_color=None, - rgbw_color=None, - rgbww_color=None, - xy_color=None, - hs_color=None, - color_temp=None, - kelvin=None, - profile=None, - flash=None, - effect=None, - color_name=None, - white=None, -): + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + brightness: int | None = None, + brightness_pct: float | None = None, + rgb_color: tuple[int, int, int] | None = None, + rgbw_color: tuple[int, int, int, int] | None = None, + rgbww_color: tuple[int, int, int, int, int] | None = None, + xy_color: tuple[float, float] | None = None, + hs_color: tuple[float, float] | None 
= None, + color_temp: int | None = None, + kelvin: int | None = None, + profile: str | None = None, + flash: str | None = None, + effect: str | None = None, + color_name: str | None = None, + white: bool | None = None, +) -> None: """Turn all or specified light on.""" hass.add_job( async_turn_on, @@ -82,24 +83,24 @@ def turn_on( async def async_turn_on( - hass, - entity_id=ENTITY_MATCH_ALL, - transition=None, - brightness=None, - brightness_pct=None, - rgb_color=None, - rgbw_color=None, - rgbww_color=None, - xy_color=None, - hs_color=None, - color_temp=None, - kelvin=None, - profile=None, - flash=None, - effect=None, - color_name=None, - white=None, -): + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + brightness: int | None = None, + brightness_pct: float | None = None, + rgb_color: tuple[int, int, int] | None = None, + rgbw_color: tuple[int, int, int, int] | None = None, + rgbww_color: tuple[int, int, int, int, int] | None = None, + xy_color: tuple[float, float] | None = None, + hs_color: tuple[float, float] | None = None, + color_temp: int | None = None, + kelvin: int | None = None, + profile: str | None = None, + flash: str | None = None, + effect: str | None = None, + color_name: str | None = None, + white: bool | None = None, +) -> None: """Turn all or specified light on.""" data = { key: value @@ -128,12 +129,22 @@ async def async_turn_on( @bind_hass -def turn_off(hass, entity_id=ENTITY_MATCH_ALL, transition=None, flash=None): +def turn_off( + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + flash: str | None = None, +) -> None: """Turn all or specified light off.""" hass.add_job(async_turn_off, hass, entity_id, transition, flash) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL, transition=None, flash=None): +async def async_turn_off( + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + flash: str | None = None, +) -> None: """Turn all or specified light off.""" data = { key: value @@ -150,21 +161,21 @@ async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL, transition=None, flas @bind_hass def toggle( - hass, - entity_id=ENTITY_MATCH_ALL, - transition=None, - brightness=None, - brightness_pct=None, - rgb_color=None, - xy_color=None, - hs_color=None, - color_temp=None, - kelvin=None, - profile=None, - flash=None, - effect=None, - color_name=None, -): + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + brightness: int | None = None, + brightness_pct: float | None = None, + rgb_color: tuple[int, int, int] | None = None, + xy_color: tuple[float, float] | None = None, + hs_color: tuple[float, float] | None = None, + color_temp: int | None = None, + kelvin: int | None = None, + profile: str | None = None, + flash: str | None = None, + effect: str | None = None, + color_name: str | None = None, +) -> None: """Toggle all or specified light.""" hass.add_job( async_toggle, @@ -186,21 +197,21 @@ def toggle( async def async_toggle( - hass, - entity_id=ENTITY_MATCH_ALL, - transition=None, - brightness=None, - brightness_pct=None, - rgb_color=None, - xy_color=None, - hs_color=None, - color_temp=None, - kelvin=None, - profile=None, - flash=None, - effect=None, - color_name=None, -): + hass: HomeAssistant, + entity_id: str = ENTITY_MATCH_ALL, + transition: float | None = None, + brightness: int | None = None, + brightness_pct: float | None = None, + rgb_color: tuple[int, int, int] | None = None, + 
xy_color: tuple[float, float] | None = None, + hs_color: tuple[float, float] | None = None, + color_temp: int | None = None, + kelvin: int | None = None, + profile: str | None = None, + flash: str | None = None, + effect: str | None = None, + color_name: str | None = None, +) -> None: """Turn all or specified light on.""" data = { key: value diff --git a/tests/components/light/conftest.py b/tests/components/light/conftest.py index 12bd62edcb7..58f2d23db95 100644 --- a/tests/components/light/conftest.py +++ b/tests/components/light/conftest.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.light import Profiles +from homeassistant.core import HomeAssistant @pytest.fixture(autouse=True) @@ -12,7 +13,7 @@ def mock_light_profiles(): """Mock loading of profiles.""" data = {} - def mock_profiles_class(hass): + def mock_profiles_class(hass: HomeAssistant) -> Profiles: profiles = Profiles(hass) profiles.data = data profiles.async_initialize = AsyncMock() diff --git a/tests/components/linear_garage_door/test_config_flow.py b/tests/components/linear_garage_door/test_config_flow.py index 4599bd24aef..64bdc589194 100644 --- a/tests/components/linear_garage_door/test_config_flow.py +++ b/tests/components/linear_garage_door/test_config_flow.py @@ -6,7 +6,7 @@ from linear_garage_door.errors import InvalidLoginError import pytest from homeassistant.components.linear_garage_door.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_EMAIL, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -61,16 +61,7 @@ async def test_reauth( ) -> None: """Test reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": mock_config_entry.title}, - "unique_id": mock_config_entry.unique_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/linkplay/conftest.py b/tests/components/linkplay/conftest.py index b3d65422e08..be83dd2412d 100644 --- a/tests/components/linkplay/conftest.py +++ b/tests/components/linkplay/conftest.py @@ -3,6 +3,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, patch +from aiohttp import ClientSession from linkplay.bridge import LinkPlayBridge, LinkPlayDevice import pytest @@ -14,11 +15,15 @@ NAME = "Smart Zone 1_54B9" @pytest.fixture def mock_linkplay_factory_bridge() -> Generator[AsyncMock]: - """Mock for linkplay_factory_bridge.""" + """Mock for linkplay_factory_httpapi_bridge.""" with ( patch( - "homeassistant.components.linkplay.config_flow.linkplay_factory_bridge" + "homeassistant.components.linkplay.config_flow.async_get_client_session", + return_value=AsyncMock(spec=ClientSession), + ), + patch( + "homeassistant.components.linkplay.config_flow.linkplay_factory_httpapi_bridge", ) as factory, ): bridge = AsyncMock(spec=LinkPlayBridge) diff --git a/tests/components/linkplay/test_config_flow.py b/tests/components/linkplay/test_config_flow.py index 641f09893c2..3fd1fbea95e 100644 --- a/tests/components/linkplay/test_config_flow.py +++ b/tests/components/linkplay/test_config_flow.py @@ -3,6 +3,9 @@ 
from ipaddress import ip_address from unittest.mock import AsyncMock +from linkplay.exceptions import LinkPlayRequestException +import pytest + from homeassistant.components.linkplay.const import DOMAIN from homeassistant.components.zeroconf import ZeroconfServiceInfo from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF @@ -47,10 +50,9 @@ ZEROCONF_DISCOVERY_RE_ENTRY = ZeroconfServiceInfo( ) +@pytest.mark.usefixtures("mock_linkplay_factory_bridge", "mock_setup_entry") async def test_user_flow( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test user setup config flow.""" result = await hass.config_entries.flow.async_init( @@ -74,10 +76,9 @@ async def test_user_flow( assert result["result"].unique_id == UUID +@pytest.mark.usefixtures("mock_linkplay_factory_bridge") async def test_user_flow_re_entry( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test user setup config flow when an entry with the same unique id already exists.""" @@ -105,10 +106,9 @@ async def test_user_flow_re_entry( assert result["reason"] == "already_configured" +@pytest.mark.usefixtures("mock_linkplay_factory_bridge", "mock_setup_entry") async def test_zeroconf_flow( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test Zeroconf flow.""" result = await hass.config_entries.flow.async_init( @@ -133,10 +133,9 @@ async def test_zeroconf_flow( assert result["result"].unique_id == UUID +@pytest.mark.usefixtures("mock_linkplay_factory_bridge") async def test_zeroconf_flow_re_entry( hass: HomeAssistant, - mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test Zeroconf flow when an entry with the same unique id already exists.""" @@ -160,16 +159,35 @@ async def test_zeroconf_flow_re_entry( assert result["reason"] == "already_configured" -async def test_flow_errors( +@pytest.mark.usefixtures("mock_setup_entry") +async def test_zeroconf_flow_errors( + hass: HomeAssistant, + mock_linkplay_factory_bridge: AsyncMock, +) -> None: + """Test flow when the device discovered through Zeroconf cannot be reached.""" + + # Temporarily make the mock_linkplay_factory_bridge throw an exception + mock_linkplay_factory_bridge.side_effect = (LinkPlayRequestException("Error"),) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=ZEROCONF_DISCOVERY, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "cannot_connect" + + +@pytest.mark.usefixtures("mock_setup_entry") +async def test_user_flow_errors( hass: HomeAssistant, mock_linkplay_factory_bridge: AsyncMock, - mock_setup_entry: AsyncMock, ) -> None: """Test flow when the device cannot be reached.""" - # Temporarily store bridge in a separate variable and set factory to return None - bridge = mock_linkplay_factory_bridge.return_value - mock_linkplay_factory_bridge.return_value = None + # Temporarily make the mock_linkplay_factory_bridge throw an exception + mock_linkplay_factory_bridge.side_effect = (LinkPlayRequestException("Error"),) result = await hass.config_entries.flow.async_init( DOMAIN, @@ -188,8 +206,8 @@ async def test_flow_errors( assert result["step_id"] == "user" assert result["errors"] == {"base": "cannot_connect"} - # Make linkplay_factory_bridge return a mock bridge again - mock_linkplay_factory_bridge.return_value = bridge + # Make 
mock_linkplay_factory_bridge_exception no longer throw an exception + mock_linkplay_factory_bridge.side_effect = None result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/litejet/test_trigger.py b/tests/components/litejet/test_trigger.py index b4374652955..c13fda9068c 100644 --- a/tests/components/litejet/test_trigger.py +++ b/tests/components/litejet/test_trigger.py @@ -2,8 +2,9 @@ from datetime import timedelta import logging +from typing import Any from unittest import mock -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest @@ -30,7 +31,9 @@ ENTITY_OTHER_SWITCH = "switch.mock_switch_2" ENTITY_OTHER_SWITCH_NUMBER = 2 -async def simulate_press(hass, mock_litejet, number): +async def simulate_press( + hass: HomeAssistant, mock_litejet: MagicMock, number: int +) -> None: """Test to simulate a press.""" _LOGGER.info("*** simulate press of %d", number) callback = mock_litejet.switch_pressed_callbacks.get(number) @@ -43,7 +46,9 @@ async def simulate_press(hass, mock_litejet, number): await hass.async_block_till_done() -async def simulate_release(hass, mock_litejet, number): +async def simulate_release( + hass: HomeAssistant, mock_litejet: MagicMock, number: int +) -> None: """Test to simulate releasing.""" _LOGGER.info("*** simulate release of %d", number) callback = mock_litejet.switch_released_callbacks.get(number) @@ -56,7 +61,9 @@ async def simulate_release(hass, mock_litejet, number): await hass.async_block_till_done() -async def simulate_time(hass, mock_litejet, delta): +async def simulate_time( + hass: HomeAssistant, mock_litejet: MagicMock, delta: timedelta +) -> None: """Test to simulate time.""" _LOGGER.info( "*** simulate time change by %s: %s", delta, mock_litejet.start_time + delta @@ -72,7 +79,7 @@ async def simulate_time(hass, mock_litejet, delta): _LOGGER.info("*** done with now=%s", dt_util.utcnow()) -async def setup_automation(hass, trigger): +async def setup_automation(hass: HomeAssistant, trigger: dict[str, Any]) -> None: """Test setting up the automation.""" await async_init_integration(hass, use_switch=True) assert await setup.async_setup_component( @@ -95,7 +102,7 @@ async def setup_automation(hass, trigger): async def test_simple( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -110,7 +117,7 @@ async def test_simple( async def test_only_release( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test the simplest form of a LiteJet trigger.""" await setup_automation( @@ -123,7 +130,7 @@ async def test_only_release( async def test_held_more_than_short( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test a too short hold.""" await setup_automation( @@ -142,7 +149,7 @@ async def test_held_more_than_short( async def test_held_more_than_long( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test a hold that is long enough.""" await setup_automation( @@ -164,7 +171,7 @@ async def test_held_more_than_long( async def test_held_less_than_short( - hass: 
HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test a hold that is short enough.""" await setup_automation( @@ -185,7 +192,7 @@ async def test_held_less_than_short( async def test_held_less_than_long( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test a hold that is too long.""" await setup_automation( @@ -206,7 +213,7 @@ async def test_held_less_than_long( async def test_held_in_range_short( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test an in-range trigger with a too short hold.""" await setup_automation( @@ -226,7 +233,7 @@ async def test_held_in_range_short( async def test_held_in_range_just_right( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test an in-range trigger with a just right hold.""" await setup_automation( @@ -249,7 +256,7 @@ async def test_held_in_range_just_right( async def test_held_in_range_long( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test an in-range trigger with a too long hold.""" await setup_automation( @@ -271,7 +278,7 @@ async def test_held_in_range_long( async def test_reload( - hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet + hass: HomeAssistant, service_calls: list[ServiceCall], mock_litejet: MagicMock ) -> None: """Test reloading automation.""" await setup_automation( diff --git a/tests/components/litterrobot/test_config_flow.py b/tests/components/litterrobot/test_config_flow.py index 5ffb78c7782..9420d3cb8a8 100644 --- a/tests/components/litterrobot/test_config_flow.py +++ b/tests/components/litterrobot/test_config_flow.py @@ -7,7 +7,7 @@ from pylitterbot.exceptions import LitterRobotException, LitterRobotLoginExcepti from homeassistant import config_entries from homeassistant.components import litterrobot -from homeassistant.const import CONF_PASSWORD, CONF_SOURCE +from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -124,15 +124,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_account: Account) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -164,15 +156,7 @@ async def test_step_reauth_failed(hass: HomeAssistant, mock_account: Account) -> ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/local_ip/test_init.py b/tests/components/local_ip/test_init.py index 
51e0628a417..7f411ea9cd7 100644 --- a/tests/components/local_ip/test_init.py +++ b/tests/components/local_ip/test_init.py @@ -2,7 +2,7 @@ from __future__ import annotations -from homeassistant.components.local_ip import DOMAIN +from homeassistant.components.local_ip.const import DOMAIN from homeassistant.components.network import MDNS_TARGET_IP, async_get_source_ip from homeassistant.config_entries import ConfigEntryState from homeassistant.core import HomeAssistant diff --git a/tests/components/locative/test_init.py b/tests/components/locative/test_init.py index 305497ebbd6..8fd239ee398 100644 --- a/tests/components/locative/test_init.py +++ b/tests/components/locative/test_init.py @@ -38,7 +38,7 @@ async def locative_client( @pytest.fixture -async def webhook_id(hass, locative_client): +async def webhook_id(hass: HomeAssistant, locative_client: TestClient) -> str: """Initialize the Geofency component and get the webhook_id.""" await async_process_ha_core_config( hass, @@ -56,7 +56,7 @@ async def webhook_id(hass, locative_client): return result["result"].data["webhook_id"] -async def test_missing_data(locative_client, webhook_id) -> None: +async def test_missing_data(locative_client: TestClient, webhook_id: str) -> None: """Test missing data.""" url = f"/api/webhook/{webhook_id}" @@ -116,7 +116,9 @@ async def test_missing_data(locative_client, webhook_id) -> None: assert req.status == HTTPStatus.UNPROCESSABLE_ENTITY -async def test_enter_and_exit(hass: HomeAssistant, locative_client, webhook_id) -> None: +async def test_enter_and_exit( + hass: HomeAssistant, locative_client: TestClient, webhook_id: str +) -> None: """Test when there is a known zone.""" url = f"/api/webhook/{webhook_id}" @@ -186,7 +188,7 @@ async def test_enter_and_exit(hass: HomeAssistant, locative_client, webhook_id) async def test_exit_after_enter( - hass: HomeAssistant, locative_client, webhook_id + hass: HomeAssistant, locative_client: TestClient, webhook_id: str ) -> None: """Test when an exit message comes after an enter message.""" url = f"/api/webhook/{webhook_id}" @@ -229,7 +231,9 @@ async def test_exit_after_enter( assert state.state == "work" -async def test_exit_first(hass: HomeAssistant, locative_client, webhook_id) -> None: +async def test_exit_first( + hass: HomeAssistant, locative_client: TestClient, webhook_id: str +) -> None: """Test when an exit message is sent first on a new device.""" url = f"/api/webhook/{webhook_id}" @@ -250,7 +254,9 @@ async def test_exit_first(hass: HomeAssistant, locative_client, webhook_id) -> N assert state.state == "not_home" -async def test_two_devices(hass: HomeAssistant, locative_client, webhook_id) -> None: +async def test_two_devices( + hass: HomeAssistant, locative_client: TestClient, webhook_id: str +) -> None: """Test updating two different devices.""" url = f"/api/webhook/{webhook_id}" @@ -294,7 +300,7 @@ async def test_two_devices(hass: HomeAssistant, locative_client, webhook_id) -> reason="The device_tracker component does not support unloading yet." 
) async def test_load_unload_entry( - hass: HomeAssistant, locative_client, webhook_id + hass: HomeAssistant, locative_client: TestClient, webhook_id: str ) -> None: """Test that the appropriate dispatch signals are added and removed.""" url = f"/api/webhook/{webhook_id}" diff --git a/tests/components/logbook/common.py b/tests/components/logbook/common.py index 67f12955581..afa8b7fcde5 100644 --- a/tests/components/logbook/common.py +++ b/tests/components/logbook/common.py @@ -7,7 +7,7 @@ from typing import Any from homeassistant.components import logbook from homeassistant.components.logbook import processor -from homeassistant.components.logbook.models import LogbookConfig +from homeassistant.components.logbook.models import EventAsRow, LogbookConfig from homeassistant.components.recorder.models import ( process_timestamp_to_utc_isoformat, ulid_to_bytes_or_none, @@ -18,6 +18,8 @@ from homeassistant.helpers import entity_registry as er from homeassistant.helpers.json import JSONEncoder import homeassistant.util.dt as dt_util +IDX_TO_NAME = dict(enumerate(EventAsRow._fields)) + class MockRow: """Minimal row mock.""" @@ -48,6 +50,10 @@ class MockRow: self.attributes = None self.context_only = False + def __getitem__(self, idx: int) -> Any: + """Get item.""" + return getattr(self, IDX_TO_NAME[idx]) + @property def time_fired_minute(self): """Minute the event was fired.""" @@ -73,7 +79,7 @@ def mock_humanify(hass_, rows): event_cache, entity_name_cache, include_entity_name=True, - format_time=processor._row_time_fired_isoformat, + timestamp=False, ) context_augmenter = processor.ContextAugmenter(logbook_run) return list( diff --git a/tests/components/logbook/test_init.py b/tests/components/logbook/test_init.py index 3534192a43e..606c398c31f 100644 --- a/tests/components/logbook/test_init.py +++ b/tests/components/logbook/test_init.py @@ -1,11 +1,9 @@ """The tests for the logbook component.""" import asyncio -import collections from collections.abc import Callable from datetime import datetime, timedelta from http import HTTPStatus -import json from unittest.mock import Mock from freezegun import freeze_time @@ -15,7 +13,7 @@ import voluptuous as vol from homeassistant.components import logbook, recorder from homeassistant.components.alexa.smart_home import EVENT_ALEXA_SMART_HOME from homeassistant.components.automation import EVENT_AUTOMATION_TRIGGERED -from homeassistant.components.logbook.models import LazyEventPartialState +from homeassistant.components.logbook.models import EventAsRow, LazyEventPartialState from homeassistant.components.logbook.processor import EventProcessor from homeassistant.components.logbook.queries.common import PSEUDO_EVENT_STATE_CHANGED from homeassistant.components.recorder import Recorder @@ -44,7 +42,6 @@ import homeassistant.core as ha from homeassistant.core import Event, HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entityfilter import CONF_ENTITY_GLOBS -from homeassistant.helpers.json import JSONEncoder from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -68,12 +65,12 @@ async def hass_(recorder_mock: Recorder, hass: HomeAssistant) -> HomeAssistant: @pytest.fixture -async def set_utc(hass): +async def set_utc(hass: HomeAssistant) -> None: """Set timezone to UTC.""" await hass.config.async_set_time_zone("UTC") -async def test_service_call_create_logbook_entry(hass_) -> None: +async def test_service_call_create_logbook_entry(hass_: 
HomeAssistant) -> None:
     """Test if service call create log book entry."""
     calls = async_capture_events(hass_, logbook.EVENT_LOGBOOK_ENTRY)
@@ -126,8 +123,9 @@ async def test_service_call_create_logbook_entry(hass_) -> None:
     assert last_call.data.get(logbook.ATTR_DOMAIN) == "logbook"
 
 
+@pytest.mark.usefixtures("recorder_mock")
 async def test_service_call_create_logbook_entry_invalid_entity_id(
-    recorder_mock: Recorder, hass: HomeAssistant
+    hass: HomeAssistant,
 ) -> None:
     """Test if service call create log book entry with an invalid entity id."""
     await async_setup_component(hass, "logbook", {})
@@ -156,7 +154,9 @@ async def test_service_call_create_logbook_entry_invalid_entity_id(
     assert events[0][logbook.ATTR_MESSAGE] == "is triggered"
 
 
-async def test_service_call_create_log_book_entry_no_message(hass_) -> None:
+async def test_service_call_create_log_book_entry_no_message(
+    hass_: HomeAssistant,
+) -> None:
     """Test if service call create log book entry without message."""
     calls = async_capture_events(hass_, logbook.EVENT_LOGBOOK_ENTRY)
 
@@ -172,7 +172,7 @@ async def test_service_call_create_log_book_entry_no_message(hass_) -> None:
 
 
 async def test_filter_sensor(
-    hass_: ha.HomeAssistant, hass_client: ClientSessionGenerator
+    hass_: HomeAssistant, hass_client: ClientSessionGenerator
 ) -> None:
     """Test numeric sensors are filtered."""
 
@@ -220,7 +220,7 @@ async def test_filter_sensor(
     _assert_entry(entries[2], name="ble", entity_id=entity_id4, state="10")
 
 
-async def test_home_assistant_start_stop_not_grouped(hass_) -> None:
+async def test_home_assistant_start_stop_not_grouped(hass_: HomeAssistant) -> None:
     """Test if HA start and stop events are no longer grouped."""
     await async_setup_component(hass_, "homeassistant", {})
     await hass_.async_block_till_done()
@@ -237,7 +237,7 @@ async def test_home_assistant_start_stop_not_grouped(hass_) -> None:
     assert_entry(entries[1], name="Home Assistant", message="started", domain=ha.DOMAIN)
 
 
-async def test_home_assistant_start(hass_) -> None:
+async def test_home_assistant_start(hass_: HomeAssistant) -> None:
     """Test if HA start is not filtered or converted into a restart."""
     await async_setup_component(hass_, "homeassistant", {})
     await hass_.async_block_till_done()
@@ -257,7 +257,7 @@ async def test_home_assistant_start(hass_) -> None:
     assert_entry(entries[1], pointA, "bla", entity_id=entity_id)
 
 
-def test_process_custom_logbook_entries(hass_) -> None:
+def test_process_custom_logbook_entries(hass_: HomeAssistant) -> None:
     """Test if custom log book entries get added as an entry."""
     name = "Nice name"
     message = "has a custom entry"
@@ -324,55 +324,27 @@ def create_state_changed_event_from_old_new(
     entity_id, event_time_fired, old_state, new_state
 ):
     """Create a state changed event from a old and new state."""
-    attributes = {}
-    if new_state is not None:
-        attributes = new_state.get("attributes")
-    attributes_json = json.dumps(attributes, cls=JSONEncoder)
-    row = collections.namedtuple(
-        "Row",
-        [
-            "event_type",
-            "event_data",
-            "time_fired",
-            "time_fired_ts",
-            "context_id_bin",
-            "context_user_id_bin",
-            "context_parent_id_bin",
-            "state",
-            "entity_id",
-            "domain",
-            "attributes",
-            "state_id",
-            "old_state_id",
-            "shared_attrs",
-            "shared_data",
-            "context_only",
-        ],
+    row = EventAsRow(
+        row_id=1,
+        event_type=PSEUDO_EVENT_STATE_CHANGED,
+        event_data="{}",
+        time_fired_ts=dt_util.utc_to_timestamp(event_time_fired),
+        context_id_bin=None,
+        context_user_id_bin=None,
+        context_parent_id_bin=None,
+        state=new_state and new_state.get("state"),
+        entity_id=entity_id,
+        icon=None,
+        context_only=False,
+        data=None,
+        context=None,
     )
-
-    row.event_type = PSEUDO_EVENT_STATE_CHANGED
-    row.event_data = "{}"
-    row.shared_data = "{}"
-    row.attributes = attributes_json
-    row.shared_attrs = attributes_json
-    row.time_fired = event_time_fired
-    row.time_fired_ts = dt_util.utc_to_timestamp(event_time_fired)
-    row.state = new_state and new_state.get("state")
-    row.entity_id = entity_id
-    row.domain = entity_id and ha.split_entity_id(entity_id)[0]
-    row.context_only = False
-    row.context_id_bin = None
-    row.friendly_name = None
-    row.icon = None
-    row.context_user_id_bin = None
-    row.context_parent_id_bin = None
-    row.old_state_id = old_state and 1
-    row.state_id = new_state and 1
     return LazyEventPartialState(row, {})
 
 
+@pytest.mark.usefixtures("recorder_mock")
 async def test_logbook_view(
-    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
+    hass: HomeAssistant, hass_client: ClientSessionGenerator
 ) -> None:
     """Test the logbook view."""
     await async_setup_component(hass, "logbook", {})
@@ -382,8 +354,9 @@ async def test_logbook_view(
     assert response.status == HTTPStatus.OK
 
 
+@pytest.mark.usefixtures("recorder_mock")
 async def test_logbook_view_invalid_start_date_time(
-    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
+    hass: HomeAssistant, hass_client: ClientSessionGenerator
 ) -> None:
     """Test the logbook view with an invalid date time."""
     await async_setup_component(hass, "logbook", {})
@@ -393,8 +366,9 @@ async def test_logbook_view_invalid_start_date_time(
     assert response.status == HTTPStatus.BAD_REQUEST
 
 
+@pytest.mark.usefixtures("recorder_mock")
 async def test_logbook_view_invalid_end_date_time(
-    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
+    hass: HomeAssistant, hass_client: ClientSessionGenerator
 ) -> None:
     """Test the logbook view."""
     await async_setup_component(hass, "logbook", {})
@@ -406,11 +380,10 @@ async def test_logbook_view_invalid_end_date_time(
     assert response.status == HTTPStatus.BAD_REQUEST
 
 
+@pytest.mark.usefixtures("recorder_mock", "set_utc")
 async def test_logbook_view_period_entity(
-    recorder_mock: Recorder,
     hass: HomeAssistant,
     hass_client: ClientSessionGenerator,
-    set_utc,
 ) -> None:
     """Test the logbook view with period and entity."""
     await async_setup_component(hass, "logbook", {})
@@ -492,8 +465,9 @@ async def test_logbook_view_period_entity(
     assert response_json[0]["entity_id"] == entity_id_test
 
 
+@pytest.mark.usefixtures("recorder_mock")
 async def test_logbook_describe_event(
-    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
+    hass: HomeAssistant, hass_client: ClientSessionGenerator
 ) -> None:
     """Test teaching logbook about a new event."""
 
@@ -540,8 +514,9 @@ async def test_logbook_describe_event(
     assert event["domain"] == "test_domain"
 
 
+@pytest.mark.usefixtures("recorder_mock")
 async def test_exclude_described_event(
-    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
+    hass: HomeAssistant, hass_client: ClientSessionGenerator
 ) -> None:
     """Test exclusions of events that are described by another integration."""
     name = "My Automation Rule"
@@ -549,7 +524,7 @@ async def test_exclude_described_event(
     entity_id2 = "automation.included_rule"
     entity_id3 = "sensor.excluded_domain"
 
-    def _describe(event):
+    def _describe(event: Event) -> dict[str, str]:
         """Describe an event."""
         return {
             "name": "Test Name",
@@ -557,7 +532,12 @@ async def test_exclude_described_event(
             "entity_id": event.data[ATTR_ENTITY_ID],
         }
 
-    def async_describe_events(hass, async_describe_event):
+    def async_describe_events(
+        hass: HomeAssistant,
+        async_describe_event: Callable[
+            [str, str, Callable[[Event], dict[str, str]]], None
+        ],
+    ) -> None:
         """Mock to describe events."""
         async_describe_event("automation", "some_automation_event", _describe)
         async_describe_event("sensor", "some_event", _describe)
@@ -611,8 +591,9 @@ async def test_exclude_described_event(
     assert event["entity_id"] == "automation.included_rule"
 
 
+@pytest.mark.usefixtures("recorder_mock")
 async def test_logbook_view_end_time_entity(
-    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
+    hass: HomeAssistant, hass_client: ClientSessionGenerator
 ) -> None:
     """Test the logbook view with end_time and entity."""
     await async_setup_component(hass, "logbook", {})
@@ -671,8 +652,9 @@ async def test_logbook_view_end_time_entity(
     assert response_json[0]["entity_id"] == entity_id_test
 
 
+@pytest.mark.usefixtures("recorder_mock")
 async def test_logbook_entity_filter_with_automations(
-    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
+    hass: HomeAssistant, hass_client: ClientSessionGenerator
 ) -> None:
     """Test the logbook view with end_time and entity with automations and scripts."""
     await asyncio.gather(
@@ -757,8 +739,9 @@ async def test_logbook_entity_filter_with_automations(
     assert json_dict[0]["entity_id"] == entity_id_second
 
 
+@pytest.mark.usefixtures("recorder_mock")
 async def test_logbook_entity_no_longer_in_state_machine(
-    recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator
+    hass: HomeAssistant, hass_client: ClientSessionGenerator
 ) -> None:
     """Test the logbook view with an entity that hass been removed from the state machine."""
     await async_setup_component(hass, "logbook", {})
@@ -796,11 +779,10 @@ async def test_logbook_entity_no_longer_in_state_machine(
     assert json_dict[0]["name"] == "area 001"
 
 
+@pytest.mark.usefixtures("recorder_mock", "set_utc")
 async def test_filter_continuous_sensor_values(
-    recorder_mock: Recorder,
     hass: HomeAssistant,
     hass_client: ClientSessionGenerator,
-    set_utc,
 ) -> None:
     """Test remove continuous sensor events from logbook."""
     await async_setup_component(hass, "logbook", {})
@@ -840,11 +822,10 @@ async def test_filter_continuous_sensor_values(
     assert response_json[1]["entity_id"] == entity_id_third
 
 
+@pytest.mark.usefixtures("recorder_mock", "set_utc")
 async def test_exclude_new_entities(
-    recorder_mock: Recorder,
     hass: HomeAssistant,
     hass_client: ClientSessionGenerator,
-    set_utc,
 ) -> None:
     """Test if events are excluded on first update."""
     await asyncio.gather(
@@ -882,11 +863,10 @@ async def test_exclude_new_entities(
     assert response_json[1]["message"] == "started"
 
 
+@pytest.mark.usefixtures("recorder_mock", "set_utc")
 async def test_exclude_removed_entities(
-    recorder_mock: Recorder,
     hass: HomeAssistant,
     hass_client: ClientSessionGenerator,
-    set_utc,
 ) -> None:
     """Test if events are excluded on last update."""
     await asyncio.gather(
@@ -931,11 +911,10 @@ async def test_exclude_removed_entities(
     assert response_json[2]["entity_id"] == entity_id2
 
 
+@pytest.mark.usefixtures("recorder_mock", "set_utc")
 async def test_exclude_attribute_changes(
-    recorder_mock: Recorder,
     hass: HomeAssistant,
     hass_client: ClientSessionGenerator,
-    set_utc,
 ) -> None:
     """Test if events of attribute changes are filtered."""
     await asyncio.gather(
@@ -976,8 +955,9 @@ async def 
test_exclude_attribute_changes( assert response_json[2]["entity_id"] == "light.kitchen" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_context_id( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -1129,8 +1109,9 @@ async def test_logbook_entity_context_id( assert json_dict[7]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_context_id_automation_script_started_manually( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook populates context_ids for scripts and automations started manually.""" await asyncio.gather( @@ -1221,8 +1202,9 @@ async def test_logbook_context_id_automation_script_started_manually( assert json_dict[4]["context_domain"] == "script" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_entity_context_parent_id( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view links events via context parent_id.""" await asyncio.gather( @@ -1403,8 +1385,9 @@ async def test_logbook_entity_context_parent_id( assert json_dict[8]["context_user_id"] == "485cacf93ef84d25a99ced3126b921d2" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_context_from_template( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -1493,8 +1476,9 @@ async def test_logbook_context_from_template( assert json_dict[5]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a single entity and .""" await async_setup_component(hass, "logbook", {}) @@ -1564,8 +1548,9 @@ async def test_logbook_( assert json_dict[1]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_many_entities_multiple_calls( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a many entities called multiple times.""" await async_setup_component(hass, "logbook", {}) @@ -1636,8 +1621,9 @@ async def test_logbook_many_entities_multiple_calls( assert len(json_dict) == 0 +@pytest.mark.usefixtures("recorder_mock") async def test_custom_log_entry_discoverable_via_( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if a custom log entry is later discoverable via .""" await async_setup_component(hass, "logbook", {}) @@ -1673,8 +1659,9 @@ async def test_custom_log_entry_discoverable_via_( assert json_dict[0]["entity_id"] == "switch.test_switch" +@pytest.mark.usefixtures("recorder_mock") async def 
test_logbook_multiple_entities( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with a multiple entities.""" await async_setup_component(hass, "logbook", {}) @@ -1799,8 +1786,9 @@ async def test_logbook_multiple_entities( assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_invalid_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with requesting an invalid entity.""" await async_setup_component(hass, "logbook", {}) @@ -1819,8 +1807,9 @@ async def test_logbook_invalid_entity( assert response.status == HTTPStatus.INTERNAL_SERVER_ERROR +@pytest.mark.usefixtures("recorder_mock") async def test_icon_and_state( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test to ensure state and custom icons are returned.""" await asyncio.gather( @@ -1864,8 +1853,9 @@ async def test_icon_and_state( assert response_json[2]["state"] == STATE_OFF +@pytest.mark.usefixtures("recorder_mock") async def test_fire_logbook_entries( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test many logbook entry calls.""" await async_setup_component(hass, "logbook", {}) @@ -1902,8 +1892,9 @@ async def test_fire_logbook_entries( assert len(response_json) == 11 +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_domain( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain is excluded in config.""" entity_id = "switch.bla" @@ -1938,8 +1929,9 @@ async def test_exclude_events_domain( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_domain_glob( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain or glob is excluded in config.""" entity_id = "switch.bla" @@ -1983,8 +1975,9 @@ async def test_exclude_events_domain_glob( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_include_events_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if entity is included in config.""" entity_id = "sensor.bla" @@ -2025,8 +2018,9 @@ async def test_include_events_entity( _assert_entry(entries[1], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_exclude_events_entity( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if entity is excluded in config.""" entity_id = "sensor.bla" @@ -2061,8 +2055,9 @@ async def test_exclude_events_entity( _assert_entry(entries[1], name="blu", entity_id=entity_id2) 
+@pytest.mark.usefixtures("recorder_mock") async def test_include_events_domain( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain is included in config.""" assert await async_setup_component(hass, "alexa", {}) @@ -2105,8 +2100,9 @@ async def test_include_events_domain( _assert_entry(entries[2], name="blu", entity_id=entity_id2) +@pytest.mark.usefixtures("recorder_mock") async def test_include_events_domain_glob( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if domain or glob is included in config.""" assert await async_setup_component(hass, "alexa", {}) @@ -2164,8 +2160,9 @@ async def test_include_events_domain_glob( _assert_entry(entries[3], name="included", entity_id=entity_id3) +@pytest.mark.usefixtures("recorder_mock") async def test_include_exclude_events_no_globs( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" @@ -2222,8 +2219,9 @@ async def test_include_exclude_events_no_globs( _assert_entry(entries[5], name="keep", entity_id=entity_id4, state="10") +@pytest.mark.usefixtures("recorder_mock") async def test_include_exclude_events_with_glob_filters( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test if events are filtered if include and exclude is configured.""" entity_id = "switch.bla" @@ -2288,8 +2286,9 @@ async def test_include_exclude_events_with_glob_filters( _assert_entry(entries[6], name="included", entity_id=entity_id5, state="30") +@pytest.mark.usefixtures("recorder_mock") async def test_empty_config( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test we can handle an empty entity filter.""" entity_id = "sensor.blu" @@ -2322,8 +2321,9 @@ async def test_empty_config( _assert_entry(entries[1], name="blu", entity_id=entity_id) +@pytest.mark.usefixtures("recorder_mock") async def test_context_filter( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test we can filter by context.""" assert await async_setup_component(hass, "logbook", {}) @@ -2399,8 +2399,9 @@ def _assert_entry( assert state == entry["state"] +@pytest.mark.usefixtures("recorder_mock") async def test_get_events( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test logbook get_events.""" now = dt_util.utcnow() @@ -2519,8 +2520,9 @@ async def test_get_events( assert isinstance(results[0]["when"], float) +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_future_start_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events with a future start time.""" await async_setup_component(hass, "logbook", {}) @@ -2544,8 +2546,9 @@ async def test_get_events_future_start_time( assert 
len(results) == 0 +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_bad_start_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events bad start time.""" await async_setup_component(hass, "logbook", {}) @@ -2564,8 +2567,9 @@ async def test_get_events_bad_start_time( assert response["error"]["code"] == "invalid_start_time" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_bad_end_time( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events bad end time.""" now = dt_util.utcnow() @@ -2586,8 +2590,9 @@ async def test_get_events_bad_end_time( assert response["error"]["code"] == "invalid_end_time" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_invalid_filters( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test get_events invalid filters.""" await async_setup_component(hass, "logbook", {}) @@ -2616,8 +2621,8 @@ async def test_get_events_invalid_filters( assert response["error"]["code"] == "invalid_format" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_with_device_ids( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator, device_registry: dr.DeviceRegistry, @@ -2757,8 +2762,9 @@ async def test_get_events_with_device_ids( assert isinstance(results[3]["when"], float) +@pytest.mark.usefixtures("recorder_mock") async def test_logbook_select_entities_context_id( - recorder_mock: Recorder, hass: HomeAssistant, hass_client: ClientSessionGenerator + hass: HomeAssistant, hass_client: ClientSessionGenerator ) -> None: """Test the logbook view with end_time and entity with automations and scripts.""" await asyncio.gather( @@ -2892,8 +2898,9 @@ async def test_logbook_select_entities_context_id( assert json_dict[3]["context_user_id"] == "9400facee45711eaa9308bfd3d19e474" +@pytest.mark.usefixtures("recorder_mock") async def test_get_events_with_context_state( - recorder_mock: Recorder, hass: HomeAssistant, hass_ws_client: WebSocketGenerator + hass: HomeAssistant, hass_ws_client: WebSocketGenerator ) -> None: """Test logbook get_events with a context state.""" now = dt_util.utcnow() @@ -2957,9 +2964,8 @@ async def test_get_events_with_context_state( assert "context_event_type" not in results[3] -async def test_logbook_with_empty_config( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_logbook_with_empty_config(hass: HomeAssistant) -> None: """Test we handle a empty configuration.""" assert await async_setup_component( hass, @@ -2972,9 +2978,8 @@ async def test_logbook_with_empty_config( await hass.async_block_till_done() -async def test_logbook_with_non_iterable_entity_filter( - recorder_mock: Recorder, hass: HomeAssistant -) -> None: +@pytest.mark.usefixtures("recorder_mock") +async def test_logbook_with_non_iterable_entity_filter(hass: HomeAssistant) -> None: """Test we handle a non-iterable entity filter.""" assert await async_setup_component( hass, diff --git a/tests/components/logbook/test_models.py b/tests/components/logbook/test_models.py index 7021711014f..cfdd7efc727 100644 --- a/tests/components/logbook/test_models.py +++ b/tests/components/logbook/test_models.py @@ -2,20 +2,26 @@ from 
unittest.mock import Mock
 
-from homeassistant.components.logbook.models import LazyEventPartialState
+from homeassistant.components.logbook.models import EventAsRow, LazyEventPartialState
 
 
 def test_lazy_event_partial_state_context() -> None:
     """Test we can extract context from a lazy event partial state."""
     state = LazyEventPartialState(
-        Mock(
+        EventAsRow(
+            row_id=1,
+            event_type="event_type",
+            event_data={},
+            time_fired_ts=1,
             context_id_bin=b"1234123412341234",
             context_user_id_bin=b"1234123412341234",
             context_parent_id_bin=b"4444444444444444",
-            event_data={},
-            event_type="event_type",
-            entity_id="entity_id",
             state="state",
+            entity_id="entity_id",
+            icon="icon",
+            context_only=False,
+            data={},
+            context=Mock(),
         ),
         {},
     )
diff --git a/tests/components/logbook/test_websocket_api.py b/tests/components/logbook/test_websocket_api.py
index 9b1a6bb44cc..e5649564f94 100644
--- a/tests/components/logbook/test_websocket_api.py
+++ b/tests/components/logbook/test_websocket_api.py
@@ -48,12 +48,6 @@ from tests.components.recorder.common import (
 from tests.typing import RecorderInstanceGenerator, WebSocketGenerator
 
 
-@pytest.fixture
-async def set_utc(hass):
-    """Set timezone to UTC."""
-    await hass.config.async_set_time_zone("UTC")
-
-
 def listeners_without_writes(listeners: dict[str, int]) -> dict[str, int]:
     """Return listeners without final write listeners since we are not testing for these."""
     return {
diff --git a/tests/components/logger/test_init.py b/tests/components/logger/test_init.py
index d6df1f92a72..24e58a77226 100644
--- a/tests/components/logger/test_init.py
+++ b/tests/components/logger/test_init.py
@@ -226,7 +226,7 @@ async def test_can_set_level_from_store(
     _reset_logging()
 
 
-async def _assert_log_levels(hass):
+async def _assert_log_levels(hass: HomeAssistant) -> None:
     assert logging.getLogger(UNCONFIG_NS).level == logging.NOTSET
     assert logging.getLogger(UNCONFIG_NS).isEnabledFor(logging.CRITICAL) is True
     assert (
diff --git a/tests/components/logi_circle/__init__.py b/tests/components/logi_circle/__init__.py
deleted file mode 100644
index d2e2fbb8fdb..00000000000
--- a/tests/components/logi_circle/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""Tests for the Logi Circle component."""
diff --git a/tests/components/logi_circle/test_config_flow.py b/tests/components/logi_circle/test_config_flow.py
deleted file mode 100644
index ab4bae02ad6..00000000000
--- a/tests/components/logi_circle/test_config_flow.py
+++ /dev/null
@@ -1,227 +0,0 @@
-"""Tests for Logi Circle config flow."""
-
-import asyncio
-from collections.abc import Generator
-from http import HTTPStatus
-from typing import Any
-from unittest.mock import AsyncMock, MagicMock, Mock, patch
-
-import pytest
-
-from homeassistant import config_entries
-from homeassistant.components.http import KEY_HASS
-from homeassistant.components.logi_circle import config_flow
-from homeassistant.components.logi_circle.config_flow import (
-    DOMAIN,
-    AuthorizationFailed,
-    LogiCircleAuthCallbackView,
-)
-from homeassistant.core import HomeAssistant
-from homeassistant.data_entry_flow import AbortFlow, FlowResultType
-from homeassistant.setup import async_setup_component
-
-from tests.common import MockConfigEntry
-
-
-class MockRequest:
-    """Mock request passed to HomeAssistantView."""
-
-    def __init__(self, hass: HomeAssistant, query: dict[str, Any]) -> None:
-        """Init request object."""
-        self.app = {KEY_HASS: hass}
-        self.query = query
-
-
-def init_config_flow(hass: HomeAssistant) -> config_flow.LogiCircleFlowHandler:
-    """Init a 
configuration flow.""" - config_flow.register_flow_implementation( - hass, - DOMAIN, - client_id="id", - client_secret="secret", - api_key="123", - redirect_uri="http://example.com", - sensors=None, - ) - flow = config_flow.LogiCircleFlowHandler() - flow._get_authorization_url = Mock(return_value="http://example.com") - flow.hass = hass - return flow - - -@pytest.fixture -def mock_logi_circle() -> Generator[MagicMock]: - """Mock logi_circle.""" - with patch( - "homeassistant.components.logi_circle.config_flow.LogiCircle" - ) as logi_circle: - future = asyncio.Future() - future.set_result({"accountId": "testId"}) - LogiCircle = logi_circle() - LogiCircle.authorize = AsyncMock(return_value=True) - LogiCircle.close = AsyncMock(return_value=True) - LogiCircle.account = future - LogiCircle.authorize_url = "http://authorize.url" - yield LogiCircle - - -@pytest.mark.usefixtures("mock_logi_circle") -async def test_step_import(hass: HomeAssistant) -> None: - """Test that we trigger import when configuring with client.""" - flow = init_config_flow(hass) - - result = await flow.async_step_import() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - - -@pytest.mark.usefixtures("mock_logi_circle") -async def test_full_flow_implementation(hass: HomeAssistant) -> None: - """Test registering an implementation and finishing flow works.""" - config_flow.register_flow_implementation( - hass, - "test-other", - client_id=None, - client_secret=None, - api_key=None, - redirect_uri=None, - sensors=None, - ) - flow = init_config_flow(hass) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "user" - - result = await flow.async_step_user({"flow_impl": "test-other"}) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - assert result["description_placeholders"] == { - "authorization_url": "http://example.com" - } - - result = await flow.async_step_code("123ABC") - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "Logi Circle ({})".format("testId") - - -async def test_we_reprompt_user_to_follow_link(hass: HomeAssistant) -> None: - """Test we prompt user to follow link if previously prompted.""" - flow = init_config_flow(hass) - - result = await flow.async_step_auth("dummy") - assert result["errors"]["base"] == "follow_link" - - -async def test_abort_if_no_implementation_registered(hass: HomeAssistant) -> None: - """Test we abort if no implementation is registered.""" - flow = config_flow.LogiCircleFlowHandler() - flow.hass = hass - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "missing_configuration" - - -async def test_abort_if_already_setup(hass: HomeAssistant) -> None: - """Test we abort if Logi Circle is already setup.""" - flow = init_config_flow(hass) - MockConfigEntry(domain=config_flow.DOMAIN).add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": config_entries.SOURCE_USER}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - result = await hass.config_entries.flow.async_init( - config_flow.DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" - - with pytest.raises(AbortFlow): - result = await flow.async_step_code() - - result = await 
flow.async_step_auth() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "external_setup" - - -@pytest.mark.parametrize( - ("side_effect", "error"), - [ - (TimeoutError, "authorize_url_timeout"), - (AuthorizationFailed, "invalid_auth"), - ], -) -async def test_abort_if_authorize_fails( - hass: HomeAssistant, - mock_logi_circle: MagicMock, - side_effect: type[Exception], - error: str, -) -> None: - """Test we abort if authorizing fails.""" - flow = init_config_flow(hass) - mock_logi_circle.authorize.side_effect = side_effect - - result = await flow.async_step_code("123ABC") - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "external_error" - - result = await flow.async_step_auth() - assert result["errors"]["base"] == error - - -async def test_not_pick_implementation_if_only_one(hass: HomeAssistant) -> None: - """Test we bypass picking implementation if we have one flow_imp.""" - flow = init_config_flow(hass) - - result = await flow.async_step_user() - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "auth" - - -@pytest.mark.usefixtures("mock_logi_circle") -async def test_gen_auth_url(hass: HomeAssistant) -> None: - """Test generating authorize URL from Logi Circle API.""" - config_flow.register_flow_implementation( - hass, - "test-auth-url", - client_id="id", - client_secret="secret", - api_key="123", - redirect_uri="http://example.com", - sensors=None, - ) - flow = config_flow.LogiCircleFlowHandler() - flow.hass = hass - flow.flow_impl = "test-auth-url" - await async_setup_component(hass, "http", {}) - - result = flow._get_authorization_url() - assert result == "http://authorize.url" - - -async def test_callback_view_rejects_missing_code(hass: HomeAssistant) -> None: - """Test the auth callback view rejects requests with no code.""" - view = LogiCircleAuthCallbackView() - resp = await view.get(MockRequest(hass, {})) - - assert resp.status == HTTPStatus.BAD_REQUEST - - -async def test_callback_view_accepts_code( - hass: HomeAssistant, mock_logi_circle: MagicMock -) -> None: - """Test the auth callback view handles requests with auth code.""" - init_config_flow(hass) - view = LogiCircleAuthCallbackView() - - resp = await view.get(MockRequest(hass, {"code": "456"})) - assert resp.status == HTTPStatus.OK - - await hass.async_block_till_done() - mock_logi_circle.authorize.assert_called_with("456") diff --git a/tests/components/logi_circle/test_init.py b/tests/components/logi_circle/test_init.py deleted file mode 100644 index d953acdf744..00000000000 --- a/tests/components/logi_circle/test_init.py +++ /dev/null @@ -1,68 +0,0 @@ -"""Tests for the Logi Circle integration.""" - -import asyncio -from collections.abc import Generator -from unittest.mock import AsyncMock, MagicMock, Mock, patch - -import pytest - -from homeassistant.components.logi_circle import DOMAIN -from homeassistant.config_entries import ConfigEntryState -from homeassistant.core import HomeAssistant -from homeassistant.helpers import issue_registry as ir - -from tests.common import MockConfigEntry - - -@pytest.fixture(name="disable_platforms") -def disable_platforms_fixture() -> Generator[None]: - """Disable logi_circle platforms.""" - with patch("homeassistant.components.logi_circle.PLATFORMS", []): - yield - - -@pytest.fixture -def mock_logi_circle() -> Generator[MagicMock]: - """Mock logi_circle.""" - - auth_provider_mock = Mock() - auth_provider_mock.close = AsyncMock() - auth_provider_mock.clear_authorization = AsyncMock() - - with 
patch("homeassistant.components.logi_circle.LogiCircle") as logi_circle: - future = asyncio.Future() - future.set_result({"accountId": "testId"}) - LogiCircle = logi_circle() - LogiCircle.auth_provider = auth_provider_mock - LogiCircle.synchronize_cameras = AsyncMock() - yield LogiCircle - - -@pytest.mark.usefixtures("disable_platforms", "mock_logi_circle") -async def test_repair_issue( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test the LogiCircle configuration entry loading/unloading handles the repair.""" - config_entry = MockConfigEntry( - title="Example 1", - domain=DOMAIN, - data={ - "api_key": "blah", - "client_id": "blah", - "client_secret": "blah", - "redirect_uri": "blah", - }, - ) - config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(config_entry.entry_id) - await hass.async_block_till_done() - assert config_entry.state is ConfigEntryState.LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) - - # Remove the entry - await hass.config_entries.async_remove(config_entry.entry_id) - await hass.async_block_till_done() - - assert config_entry.state is ConfigEntryState.NOT_LOADED - assert issue_registry.async_get_issue(DOMAIN, DOMAIN) is None diff --git a/tests/components/lupusec/test_config_flow.py b/tests/components/lupusec/test_config_flow.py index e106bbd5001..f354eaf0644 100644 --- a/tests/components/lupusec/test_config_flow.py +++ b/tests/components/lupusec/test_config_flow.py @@ -153,88 +153,3 @@ async def test_flow_user_init_data_already_configured(hass: HomeAssistant) -> No assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" - - -@pytest.mark.parametrize( - ("mock_import_step", "mock_title"), - [ - (MOCK_IMPORT_STEP, MOCK_IMPORT_STEP[CONF_IP_ADDRESS]), - (MOCK_IMPORT_STEP_NAME, MOCK_IMPORT_STEP_NAME[CONF_NAME]), - ], -) -async def test_flow_source_import( - hass: HomeAssistant, mock_import_step, mock_title -) -> None: - """Test configuration import from YAML.""" - with ( - patch( - "homeassistant.components.lupusec.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - patch( - "homeassistant.components.lupusec.config_flow.lupupy.Lupusec", - ) as mock_initialize_lupusec, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=mock_import_step, - ) - - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == mock_title - assert result["data"] == MOCK_DATA_STEP - assert len(mock_setup_entry.mock_calls) == 1 - assert len(mock_initialize_lupusec.mock_calls) == 1 - - -@pytest.mark.parametrize( - ("raise_error", "text_error"), - [ - (LupusecException("Test lupusec exception"), "cannot_connect"), - (JSONDecodeError("Test JSONDecodeError", "test", 1), "cannot_connect"), - (Exception("Test unknown exception"), "unknown"), - ], -) -async def test_flow_source_import_error_and_recover( - hass: HomeAssistant, raise_error, text_error -) -> None: - """Test exceptions and recovery.""" - - with patch( - "homeassistant.components.lupusec.config_flow.lupupy.Lupusec", - side_effect=raise_error, - ) as mock_initialize_lupusec: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data=MOCK_IMPORT_STEP, - ) - - await hass.async_block_till_done() - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == text_error - assert len(mock_initialize_lupusec.mock_calls) == 1 - 
-
-async def test_flow_source_import_already_configured(hass: HomeAssistant) -> None:
-    """Test duplicate config entry.."""
-
-    entry = MockConfigEntry(
-        domain=DOMAIN,
-        title=MOCK_DATA_STEP[CONF_HOST],
-        data=MOCK_DATA_STEP,
-    )
-
-    entry.add_to_hass(hass)
-
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN,
-        context={"source": config_entries.SOURCE_IMPORT},
-        data=MOCK_IMPORT_STEP,
-    )
-
-    assert result["type"] is FlowResultType.ABORT
-    assert result["reason"] == "already_configured"
diff --git a/tests/components/lutron_caseta/__init__.py b/tests/components/lutron_caseta/__init__.py
index 9b25e2a0164..b27d30ac31f 100644
--- a/tests/components/lutron_caseta/__init__.py
+++ b/tests/components/lutron_caseta/__init__.py
@@ -101,7 +101,7 @@ async def async_setup_integration(hass: HomeAssistant, mock_bridge) -> MockConfi
 class MockBridge:
     """Mock Lutron bridge that emulates configured connected status."""
 
-    def __init__(self, can_connect=True):
+    def __init__(self, can_connect=True) -> None:
         """Initialize MockBridge instance with configured mock connectivity."""
         self.can_connect = can_connect
         self.is_currently_connected = False
diff --git a/tests/components/lutron_caseta/test_device_trigger.py b/tests/components/lutron_caseta/test_device_trigger.py
index 9353b897602..1ab45bf7582 100644
--- a/tests/components/lutron_caseta/test_device_trigger.py
+++ b/tests/components/lutron_caseta/test_device_trigger.py
@@ -98,7 +98,7 @@ MOCK_BUTTON_DEVICES = [
 ]
 
 
-async def _async_setup_lutron_with_picos(hass):
+async def _async_setup_lutron_with_picos(hass: HomeAssistant) -> str:
     """Setups a lutron bridge with picos."""
     config_entry = MockConfigEntry(
         domain=DOMAIN,
diff --git a/tests/components/lyric/test_config_flow.py b/tests/components/lyric/test_config_flow.py
index 1e0ae04f741..e1916924e9f 100644
--- a/tests/components/lyric/test_config_flow.py
+++ b/tests/components/lyric/test_config_flow.py
@@ -126,9 +126,7 @@ async def test_reauthentication_flow(
     )
     old_entry.add_to_hass(hass)
 
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=old_entry.data
-    )
+    result = await old_entry.start_reauth_flow(hass)
 
     flows = hass.config_entries.flow.async_progress()
     assert len(flows) == 1
diff --git a/tests/components/madvr/const.py b/tests/components/madvr/const.py
index 8c5e122377b..e1c5435fcbb 100644
--- a/tests/components/madvr/const.py
+++ b/tests/components/madvr/const.py
@@ -8,6 +8,7 @@ MOCK_CONFIG = {
 }
 
 MOCK_MAC = "00:11:22:33:44:55"
+MOCK_MAC_NEW = "00:00:00:00:00:01"
 
 TEST_CON_ERROR = ConnectionError("Connection failed")
 TEST_IMP_ERROR = NotImplementedError("Not implemented")
diff --git a/tests/components/madvr/test_config_flow.py b/tests/components/madvr/test_config_flow.py
index 6dc84fd6b00..65eba05c802 100644
--- a/tests/components/madvr/test_config_flow.py
+++ b/tests/components/madvr/test_config_flow.py
@@ -6,12 +6,12 @@ from unittest.mock import AsyncMock, patch
 import pytest
 
 from homeassistant.components.madvr.const import DEFAULT_NAME, DOMAIN
-from homeassistant.config_entries import SOURCE_USER
+from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER
 from homeassistant.const import CONF_HOST, CONF_PORT
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
 
-from .const import MOCK_CONFIG, MOCK_MAC
+from .const import MOCK_CONFIG, MOCK_MAC, MOCK_MAC_NEW
 
 from tests.common import MockConfigEntry
 
@@ -126,3 +126,120 @@ async def test_duplicate(
     )
     assert result["type"] is FlowResultType.ABORT
     assert result["reason"] == "already_configured"
+
+
+async def test_reconfigure_flow(
+    hass: HomeAssistant,
+    mock_madvr_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test reconfigure flow."""
+    mock_config_entry.add_to_hass(hass)
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id},
+    )
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "reconfigure"
+    assert result["errors"] == {}
+
+    # define new host
+    new_host = "192.168.1.100"
+    # make sure setting port works
+    new_port = 44078
+
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {CONF_HOST: new_host, CONF_PORT: new_port},
+    )
+
+    # should get the abort with success result
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "reconfigure_successful"
+
+    # Verify that the config entry was updated
+    assert mock_config_entry.data[CONF_HOST] == new_host
+    assert mock_config_entry.data[CONF_PORT] == new_port
+
+    # Verify that the connection was tested
+    mock_madvr_client.open_connection.assert_called()
+    mock_madvr_client.async_add_tasks.assert_called()
+    mock_madvr_client.async_cancel_tasks.assert_called()
+
+
+async def test_reconfigure_new_device(
+    hass: HomeAssistant,
+    mock_madvr_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test reconfigure flow."""
+    mock_config_entry.add_to_hass(hass)
+    # test reconfigure with a new device (should fail)
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id},
+    )
+
+    # define new host
+    new_host = "192.168.1.100"
+    # make sure setting port works
+    new_port = 44078
+
+    # modify test_connection so it returns new_mac
+    mock_madvr_client.mac_address = MOCK_MAC_NEW
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {CONF_HOST: new_host, CONF_PORT: new_port},
+    )
+
+    # unique id should remain unchanged with new device, should fail
+    assert mock_config_entry.unique_id == MOCK_MAC
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "set_up_new_device"
+
+
+async def test_reconfigure_flow_errors(
+    hass: HomeAssistant,
+    mock_madvr_client: AsyncMock,
+    mock_config_entry: MockConfigEntry,
+) -> None:
+    """Test error handling in reconfigure flow."""
+    mock_config_entry.add_to_hass(hass)
+
+    result = await hass.config_entries.flow.async_init(
+        DOMAIN,
+        context={"source": SOURCE_RECONFIGURE, "entry_id": mock_config_entry.entry_id},
+    )
+
+    assert result["type"] is FlowResultType.FORM
+    assert result["step_id"] == "reconfigure"
+
+    # Test CannotConnect error
+    mock_madvr_client.open_connection.side_effect = TimeoutError
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {CONF_HOST: "192.168.1.100", CONF_PORT: 44077},
+    )
+    assert result["type"] is FlowResultType.FORM
+    assert result["errors"] == {"base": "cannot_connect"}
+
+    # Test no_mac error
+    mock_madvr_client.open_connection.side_effect = None
+    mock_madvr_client.connected = True
+    mock_madvr_client.mac_address = None
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {CONF_HOST: "192.168.1.100", CONF_PORT: 44077},
+    )
+    assert result["type"] is FlowResultType.FORM
+    assert result["errors"] == {"base": "no_mac"}
+
+    # Ensure errors are recoverable
+    mock_madvr_client.mac_address = MOCK_MAC
+    result = await hass.config_entries.flow.async_configure(
+        result["flow_id"],
+        {CONF_HOST: "192.168.1.100", CONF_PORT: 44077},
+    )
+    assert result["type"] is FlowResultType.ABORT
+    assert result["reason"] == "reconfigure_successful"
diff --git a/tests/components/mailbox/__init__.py b/tests/components/mailbox/__init__.py
deleted file mode 100644
index 5e212354579..00000000000
--- a/tests/components/mailbox/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-"""The tests for mailbox platforms."""
diff --git a/tests/components/mailbox/test_init.py b/tests/components/mailbox/test_init.py
deleted file mode 100644
index 6fcf9176aae..00000000000
--- a/tests/components/mailbox/test_init.py
+++ /dev/null
@@ -1,225 +0,0 @@
-"""The tests for the mailbox component."""
-
-from datetime import datetime
-from hashlib import sha1
-from http import HTTPStatus
-from typing import Any
-
-from aiohttp.test_utils import TestClient
-import pytest
-
-from homeassistant.components import mailbox
-from homeassistant.core import HomeAssistant
-from homeassistant.helpers import issue_registry as ir
-from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
-from homeassistant.setup import async_setup_component
-from homeassistant.util import dt as dt_util
-
-from tests.common import MockModule, mock_integration, mock_platform
-from tests.typing import ClientSessionGenerator
-
-MAILBOX_NAME = "TestMailbox"
-MEDIA_DATA = b"3f67c4ea33b37d1710f"
-MESSAGE_TEXT = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. "
-
-
-def _create_message(idx: int) -> dict[str, Any]:
-    """Create a sample message."""
-    msgtime = dt_util.as_timestamp(datetime(2010, 12, idx + 1, 13, 17, 00))
-    msgtxt = f"Message {idx + 1}. {MESSAGE_TEXT}"
-    msgsha = sha1(msgtxt.encode("utf-8")).hexdigest()
-    return {
-        "info": {
-            "origtime": int(msgtime),
-            "callerid": "John Doe <212-555-1212>",
-            "duration": "10",
-        },
-        "text": msgtxt,
-        "sha": msgsha,
-    }
-
-
-class TestMailbox(mailbox.Mailbox):
-    """Test Mailbox, with 10 sample messages."""
-
-    # This class doesn't contain any tests! Skip pytest test collection.
- __test__ = False - - def __init__(self, hass: HomeAssistant, name: str) -> None: - """Initialize Test mailbox.""" - super().__init__(hass, name) - self._messages: dict[str, dict[str, Any]] = {} - for idx in range(10): - msg = _create_message(idx) - msgsha = msg["sha"] - self._messages[msgsha] = msg - - @property - def media_type(self) -> str: - """Return the supported media type.""" - return mailbox.CONTENT_TYPE_MPEG - - @property - def can_delete(self) -> bool: - """Return if messages can be deleted.""" - return True - - @property - def has_media(self) -> bool: - """Return if messages have attached media files.""" - return True - - async def async_get_media(self, msgid: str) -> bytes: - """Return the media blob for the msgid.""" - if msgid not in self._messages: - raise mailbox.StreamError("Message not found") - - return MEDIA_DATA - - async def async_get_messages(self) -> list[dict[str, Any]]: - """Return a list of the current messages.""" - return sorted( - self._messages.values(), - key=lambda item: item["info"]["origtime"], # type: ignore[no-any-return] - reverse=True, - ) - - async def async_delete(self, msgid: str) -> bool: - """Delete the specified messages.""" - if msgid in self._messages: - del self._messages[msgid] - self.async_update() - return True - - -class MockMailbox: - """A mock mailbox platform.""" - - async def async_get_handler( - self, - hass: HomeAssistant, - config: ConfigType, - discovery_info: DiscoveryInfoType | None = None, - ) -> mailbox.Mailbox: - """Set up the Test mailbox.""" - return TestMailbox(hass, MAILBOX_NAME) - - -@pytest.fixture -def mock_mailbox(hass: HomeAssistant) -> None: - """Mock mailbox.""" - mock_integration(hass, MockModule(domain="test")) - mock_platform(hass, "test.mailbox", MockMailbox()) - - -@pytest.fixture -async def mock_http_client( - hass: HomeAssistant, hass_client: ClientSessionGenerator, mock_mailbox: None -) -> TestClient: - """Start the Home Assistant HTTP component.""" - assert await async_setup_component( - hass, mailbox.DOMAIN, {mailbox.DOMAIN: {"platform": "test"}} - ) - return await hass_client() - - -async def test_get_platforms_from_mailbox(mock_http_client: TestClient) -> None: - """Get platforms from mailbox.""" - url = "/api/mailbox/platforms" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - result = await req.json() - assert len(result) == 1 - assert result[0].get("name") == "TestMailbox" - - -async def test_get_messages_from_mailbox(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - url = "/api/mailbox/messages/TestMailbox" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - result = await req.json() - assert len(result) == 10 - - -async def test_get_media_from_mailbox(mock_http_client: TestClient) -> None: - """Get audio from mailbox.""" - mp3sha = "7cad61312c7b66f619295be2da8c7ac73b4968f1" - msgtxt = "Message 1. Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - msgsha = sha1(msgtxt.encode("utf-8")).hexdigest() - - url = f"/api/mailbox/media/TestMailbox/{msgsha}" - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - data = await req.read() - assert sha1(data).hexdigest() == mp3sha - - -async def test_delete_from_mailbox(mock_http_client: TestClient) -> None: - """Get audio from mailbox.""" - msgtxt1 = "Message 1. Lorem ipsum dolor sit amet, consectetur adipiscing elit. " - msgtxt2 = "Message 3. Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
" - msgsha1 = sha1(msgtxt1.encode("utf-8")).hexdigest() - msgsha2 = sha1(msgtxt2.encode("utf-8")).hexdigest() - - for msg in (msgsha1, msgsha2): - url = f"/api/mailbox/delete/TestMailbox/{msg}" - req = await mock_http_client.delete(url) - assert req.status == HTTPStatus.OK - - url = "/api/mailbox/messages/TestMailbox" - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.OK - result = await req.json() - assert len(result) == 8 - - -async def test_get_messages_from_invalid_mailbox(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - url = "/api/mailbox/messages/mailbox.invalid_mailbox" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.NOT_FOUND - - -async def test_get_media_from_invalid_mailbox(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - msgsha = "0000000000000000000000000000000000000000" - url = f"/api/mailbox/media/mailbox.invalid_mailbox/{msgsha}" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.NOT_FOUND - - -async def test_get_media_from_invalid_msgid(mock_http_client: TestClient) -> None: - """Get messages from mailbox.""" - msgsha = "0000000000000000000000000000000000000000" - url = f"/api/mailbox/media/TestMailbox/{msgsha}" - - req = await mock_http_client.get(url) - assert req.status == HTTPStatus.INTERNAL_SERVER_ERROR - - -async def test_delete_from_invalid_mailbox(mock_http_client: TestClient) -> None: - """Get audio from mailbox.""" - msgsha = "0000000000000000000000000000000000000000" - url = f"/api/mailbox/delete/mailbox.invalid_mailbox/{msgsha}" - - req = await mock_http_client.delete(url) - assert req.status == HTTPStatus.NOT_FOUND - - -async def test_repair_issue_is_created( - hass: HomeAssistant, issue_registry: ir.IssueRegistry, mock_mailbox: None -) -> None: - """Test repair issue is created.""" - assert await async_setup_component( - hass, mailbox.DOMAIN, {mailbox.DOMAIN: {"platform": "test"}} - ) - await hass.async_block_till_done() - assert ( - mailbox.DOMAIN, - "deprecated_mailbox_test", - ) in issue_registry.issues diff --git a/tests/components/mailgun/test_init.py b/tests/components/mailgun/test_init.py index 908e98ae31e..2e60c56faa4 100644 --- a/tests/components/mailgun/test_init.py +++ b/tests/components/mailgun/test_init.py @@ -10,7 +10,7 @@ from homeassistant import config_entries from homeassistant.components import mailgun, webhook from homeassistant.config import async_process_ha_core_config from homeassistant.const import CONF_API_KEY, CONF_DOMAIN -from homeassistant.core import HomeAssistant, callback +from homeassistant.core import Event, HomeAssistant, callback from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -29,7 +29,7 @@ async def http_client( @pytest.fixture -async def webhook_id_with_api_key(hass): +async def webhook_id_with_api_key(hass: HomeAssistant) -> str: """Initialize the Mailgun component and get the webhook_id.""" await async_setup_component( hass, @@ -53,7 +53,7 @@ async def webhook_id_with_api_key(hass): @pytest.fixture -async def webhook_id_without_api_key(hass): +async def webhook_id_without_api_key(hass: HomeAssistant) -> str: """Initialize the Mailgun component and get the webhook_id w/o API key.""" await async_setup_component(hass, mailgun.DOMAIN, {}) @@ -73,7 +73,7 @@ async def webhook_id_without_api_key(hass): @pytest.fixture -async def mailgun_events(hass): +async def mailgun_events(hass: HomeAssistant) -> list[Event]: """Return a list of 
mailgun_events triggered.""" events = [] diff --git a/tests/components/manual/test_alarm_control_panel.py b/tests/components/manual/test_alarm_control_panel.py index 6c9ba9ee9a0..7900dfd1c91 100644 --- a/tests/components/manual/test_alarm_control_panel.py +++ b/tests/components/manual/test_alarm_control_panel.py @@ -9,6 +9,10 @@ import pytest from homeassistant.components import alarm_control_panel from homeassistant.components.alarm_control_panel import AlarmControlPanelEntityFeature from homeassistant.components.demo import alarm_control_panel as demo +from homeassistant.components.manual.alarm_control_panel import ( + ATTR_NEXT_STATE, + ATTR_PREVIOUS_STATE, +) from homeassistant.const import ( ATTR_CODE, ATTR_ENTITY_ID, @@ -28,7 +32,7 @@ from homeassistant.const import ( STATE_ALARM_TRIGGERED, ) from homeassistant.core import CoreState, HomeAssistant, State -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import ServiceValidationError from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util @@ -227,7 +231,7 @@ async def test_with_invalid_code(hass: HomeAssistant, service, expected_state) - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED - with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): + with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await hass.services.async_call( alarm_control_panel.DOMAIN, service, @@ -1089,7 +1093,7 @@ async def test_disarm_during_trigger_with_invalid_code(hass: HomeAssistant) -> N assert hass.states.get(entity_id).state == STATE_ALARM_PENDING - with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): + with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, entity_id=entity_id) assert hass.states.get(entity_id).state == STATE_ALARM_PENDING @@ -1133,7 +1137,7 @@ async def test_disarm_with_template_code(hass: HomeAssistant) -> None: state = hass.states.get(entity_id) assert state.state == STATE_ALARM_ARMED_HOME - with pytest.raises(HomeAssistantError, match=r"^Invalid alarm code provided$"): + with pytest.raises(ServiceValidationError, match=r"^Invalid alarm code provided$"): await common.async_alarm_disarm(hass, "def") state = hass.states.get(entity_id) @@ -1411,8 +1415,8 @@ async def test_restore_state_triggered(hass: HomeAssistant, previous_state) -> N state = hass.states.get(entity_id) assert state - assert state.attributes["previous_state"] == previous_state - assert "next_state" not in state.attributes + assert state.attributes[ATTR_PREVIOUS_STATE] == previous_state + assert state.attributes[ATTR_NEXT_STATE] is None assert state.state == STATE_ALARM_TRIGGERED future = time + timedelta(seconds=121) diff --git a/tests/components/marytts/test_tts.py b/tests/components/marytts/test_tts.py index 75784bb56c5..0ad27cde29b 100644 --- a/tests/components/marytts/test_tts.py +++ b/tests/components/marytts/test_tts.py @@ -34,9 +34,8 @@ def get_empty_wav() -> bytes: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/mastodon/conftest.py b/tests/components/mastodon/conftest.py index 03c3e754c11..c64de44d496 100644 --- a/tests/components/mastodon/conftest.py 
+++ b/tests/components/mastodon/conftest.py @@ -53,5 +53,7 @@ def mock_config_entry() -> MockConfigEntry: CONF_ACCESS_TOKEN: "access_token", }, entry_id="01J35M4AH9HYRC2V0G6RNVNWJH", - unique_id="client_id", + unique_id="trwnh_mastodon_social", + version=1, + minor_version=2, ) diff --git a/tests/components/mastodon/snapshots/test_diagnostics.ambr b/tests/components/mastodon/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..982ecee7ee2 --- /dev/null +++ b/tests/components/mastodon/snapshots/test_diagnostics.ambr @@ -0,0 +1,247 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'account': dict({ + 'acct': 'trwnh', + 'avatar': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', + 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/014/715/original/34aa222f4ae2e0a9.png', + 'bot': False, + 'created_at': '2016-11-24T10:02:12.085Z', + 'display_name': 'infinite love ⴳ', + 'emojis': list([ + dict({ + 'shortcode': 'fatyoshi', + 'static_url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/static/e57ecb623faa0dc9.png', + 'url': 'https://files.mastodon.social/custom_emojis/images/000/023/920/original/e57ecb623faa0dc9.png', + 'visible_in_picker': True, + }), + ]), + 'fields': list([ + dict({ + 'name': 'Website', + 'value': 'trwnh.com', + 'verified_at': '2019-08-29T04:14:55.571+00:00', + }), + dict({ + 'name': 'Sponsor', + 'value': 'liberapay.com/at', + 'verified_at': '2019-11-15T10:06:15.557+00:00', + }), + dict({ + 'name': 'Fan of:', + 'value': 'Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)', + 'verified_at': None, + }), + dict({ + 'name': 'Main topics:', + 'value': 'systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!', + 'verified_at': None, + }), + ]), + 'followers_count': 821, + 'following_count': 178, + 'header': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', + 'header_static': 'https://files.mastodon.social/accounts/headers/000/014/715/original/5c6fc24edb3bb873.jpg', + 'id': '14715', + 'last_status_at': '2019-11-24T15:49:42.251Z', + 'locked': False, + 'note': '

i have approximate knowledge of many things. perpetual student. (nb/ace/they)

xmpp/email: a@trwnh.com
trwnh.com
help me live: liberapay.com/at or paypal.me/trwnh

- my triggers are moths and glitter
- i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise
- dm me if i did something wrong, so i can improve
- purest person on fedi, do not lewd in my presence
- #1 ami cole fan account

:fatyoshi:

', + 'source': dict({ + 'fields': list([ + dict({ + 'name': 'Website', + 'value': 'https://trwnh.com', + 'verified_at': '2019-08-29T04:14:55.571+00:00', + }), + dict({ + 'name': 'Sponsor', + 'value': 'https://liberapay.com/at', + 'verified_at': '2019-11-15T10:06:15.557+00:00', + }), + dict({ + 'name': 'Fan of:', + 'value': "Punk-rock and post-hardcore (Circa Survive, letlive., La Dispute, THE FEVER 333)Manga (Yu-Gi-Oh!, One Piece, JoJo's Bizarre Adventure, Death Note, Shaman King)Platformers and RPGs (Banjo-Kazooie, Boktai, Final Fantasy Crystal Chronicles)", + 'verified_at': None, + }), + dict({ + 'name': 'Main topics:', + 'value': "systemic analysis, design patterns, anticapitalism, info/tech freedom, theory and philosophy, and otherwise being a genuine and decent wholesome poster. i'm just here to hang out and talk to cool people!", + 'verified_at': None, + }), + ]), + 'follow_requests_count': 0, + 'language': '', + 'note': ''' + i have approximate knowledge of many things. perpetual student. (nb/ace/they) + + xmpp/email: a@trwnh.com + https://trwnh.com + help me live: https://liberapay.com/at or https://paypal.me/trwnh + + - my triggers are moths and glitter + - i have all notifs except mentions turned off, so please interact if you wanna be friends! i literally will not notice otherwise + - dm me if i did something wrong, so i can improve + - purest person on fedi, do not lewd in my presence + - #1 ami cole fan account + + :fatyoshi: + ''', + 'privacy': 'public', + 'sensitive': False, + }), + 'statuses_count': 33120, + 'url': 'https://mastodon.social/@trwnh', + 'username': 'trwnh', + }), + 'instance': dict({ + 'configuration': dict({ + 'accounts': dict({ + 'max_featured_tags': 10, + 'max_pinned_statuses': 4, + }), + 'media_attachments': dict({ + 'image_matrix_limit': 16777216, + 'image_size_limit': 10485760, + 'supported_mime_types': list([ + 'image/jpeg', + 'image/png', + 'image/gif', + 'image/heic', + 'image/heif', + 'image/webp', + 'video/webm', + 'video/mp4', + 'video/quicktime', + 'video/ogg', + 'audio/wave', + 'audio/wav', + 'audio/x-wav', + 'audio/x-pn-wave', + 'audio/vnd.wave', + 'audio/ogg', + 'audio/vorbis', + 'audio/mpeg', + 'audio/mp3', + 'audio/webm', + 'audio/flac', + 'audio/aac', + 'audio/m4a', + 'audio/x-m4a', + 'audio/mp4', + 'audio/3gpp', + 'video/x-ms-asf', + ]), + 'video_frame_rate_limit': 60, + 'video_matrix_limit': 2304000, + 'video_size_limit': 41943040, + }), + 'polls': dict({ + 'max_characters_per_option': 50, + 'max_expiration': 2629746, + 'max_options': 4, + 'min_expiration': 300, + }), + 'statuses': dict({ + 'characters_reserved_per_url': 23, + 'max_characters': 500, + 'max_media_attachments': 4, + }), + 'translation': dict({ + 'enabled': True, + }), + 'urls': dict({ + 'streaming': 'wss://mastodon.social', + }), + 'vapid': dict({ + 'public_key': 'BCkMmVdKDnKYwzVCDC99Iuc9GvId-x7-kKtuHnLgfF98ENiZp_aj-UNthbCdI70DqN1zUVis-x0Wrot2sBagkMc=', + }), + }), + 'contact': dict({ + 'account': dict({ + 'acct': 'Gargron', + 'avatar': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', + 'avatar_static': 'https://files.mastodon.social/accounts/avatars/000/000/001/original/dc4286ceb8fab734.jpg', + 'bot': False, + 'created_at': '2016-03-16T00:00:00.000Z', + 'discoverable': True, + 'display_name': 'Eugen 💀', + 'emojis': list([ + ]), + 'fields': list([ + dict({ + 'name': 'Patreon', + 'value': 'patreon.com/mastodon', + 'verified_at': None, + }), + ]), + 'followers_count': 133026, + 'following_count': 311, + 'group': False, + 'header': 
'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', + 'header_static': 'https://files.mastodon.social/accounts/headers/000/000/001/original/3b91c9965d00888b.jpeg', + 'id': '1', + 'last_status_at': '2022-10-31', + 'locked': False, + 'noindex': False, + 'note': '

Founder, CEO and lead developer @Mastodon, Germany.

', + 'statuses_count': 72605, + 'url': 'https://mastodon.social/@Gargron', + 'username': 'Gargron', + }), + 'email': 'staff@mastodon.social', + }), + 'description': 'The original server operated by the Mastodon gGmbH non-profit', + 'domain': 'mastodon.social', + 'languages': list([ + 'en', + ]), + 'registrations': dict({ + 'approval_required': False, + 'enabled': False, + 'message': None, + }), + 'rules': list([ + dict({ + 'id': '1', + 'text': 'Sexually explicit or violent media must be marked as sensitive when posting', + }), + dict({ + 'id': '2', + 'text': 'No racism, sexism, homophobia, transphobia, xenophobia, or casteism', + }), + dict({ + 'id': '3', + 'text': 'No incitement of violence or promotion of violent ideologies', + }), + dict({ + 'id': '4', + 'text': 'No harassment, dogpiling or doxxing of other users', + }), + dict({ + 'id': '5', + 'text': 'No content illegal in Germany', + }), + dict({ + 'id': '7', + 'text': 'Do not share intentionally false or misleading information', + }), + ]), + 'source_url': 'https://github.com/mastodon/mastodon', + 'thumbnail': dict({ + 'blurhash': 'UeKUpFxuo~R%0nW;WCnhF6RjaJt757oJodS$', + 'url': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', + 'versions': dict({ + '@1x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@1x/57c12f441d083cde.png', + '@2x': 'https://files.mastodon.social/site_uploads/files/000/000/001/@2x/57c12f441d083cde.png', + }), + }), + 'title': 'Mastodon', + 'usage': dict({ + 'users': dict({ + 'active_month': 123122, + }), + }), + 'version': '4.0.0rc1', + }), + }) +# --- diff --git a/tests/components/mastodon/snapshots/test_init.ambr b/tests/components/mastodon/snapshots/test_init.ambr index f0b650076be..37fa765acea 100644 --- a/tests/components/mastodon/snapshots/test_init.ambr +++ b/tests/components/mastodon/snapshots/test_init.ambr @@ -13,7 +13,7 @@ 'identifiers': set({ tuple( 'mastodon', - 'client_id', + 'trwnh_mastodon_social', ), }), 'is_new': False, @@ -22,7 +22,7 @@ 'manufacturer': 'Mastodon gGmbH', 'model': '@trwnh@mastodon.social', 'model_id': None, - 'name': 'Mastodon', + 'name': 'Mastodon @trwnh@mastodon.social', 'name_by_user': None, 'primary_config_entry': , 'serial_number': None, diff --git a/tests/components/mastodon/snapshots/test_sensor.ambr b/tests/components/mastodon/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c8df8cdab19 --- /dev/null +++ b/tests/components/mastodon/snapshots/test_sensor.ambr @@ -0,0 +1,151 @@ +# serializer version: 1 +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_followers-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_followers', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Followers', + 'platform': 'mastodon', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'followers', + 'unique_id': 'trwnh_mastodon_social_followers', + 'unit_of_measurement': 'accounts', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_followers-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mastodon 
@trwnh@mastodon.social Followers', + 'state_class': , + 'unit_of_measurement': 'accounts', + }), + 'context': , + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_followers', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '821', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_following', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Following', + 'platform': 'mastodon', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'following', + 'unique_id': 'trwnh_mastodon_social_following', + 'unit_of_measurement': 'accounts', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_following-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mastodon @trwnh@mastodon.social Following', + 'state_class': , + 'unit_of_measurement': 'accounts', + }), + 'context': , + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_following', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '178', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_posts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Posts', + 'platform': 'mastodon', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'posts', + 'unique_id': 'trwnh_mastodon_social_posts', + 'unit_of_measurement': 'posts', + }) +# --- +# name: test_sensors[sensor.mastodon_trwnh_mastodon_social_posts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Mastodon @trwnh@mastodon.social Posts', + 'state_class': , + 'unit_of_measurement': 'posts', + }), + 'context': , + 'entity_id': 'sensor.mastodon_trwnh_mastodon_social_posts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33120', + }) +# --- diff --git a/tests/components/mastodon/test_config_flow.py b/tests/components/mastodon/test_config_flow.py index 01cdc061d3e..073a6534d7d 100644 --- a/tests/components/mastodon/test_config_flow.py +++ b/tests/components/mastodon/test_config_flow.py @@ -44,7 +44,7 @@ async def test_full_flow( CONF_CLIENT_SECRET: "client_secret", CONF_ACCESS_TOKEN: "access_token", } - assert result["result"].unique_id == "client_id" + assert result["result"].unique_id == "trwnh_mastodon_social" @pytest.mark.parametrize( diff --git a/tests/components/mastodon/test_diagnostics.py b/tests/components/mastodon/test_diagnostics.py new file mode 100644 index 00000000000..c2de15d1a51 --- /dev/null +++ b/tests/components/mastodon/test_diagnostics.py @@ -0,0 +1,28 @@ +"""Test Mastodon 
diagnostics.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry) + assert ( + await get_diagnostics_for_config_entry(hass, hass_client, mock_config_entry) + == snapshot + ) diff --git a/tests/components/mastodon/test_init.py b/tests/components/mastodon/test_init.py index 53796e39782..c3d0728fe08 100644 --- a/tests/components/mastodon/test_init.py +++ b/tests/components/mastodon/test_init.py @@ -3,15 +3,36 @@ from unittest.mock import AsyncMock from mastodon.Mastodon import MastodonError +from syrupy.assertion import SnapshotAssertion +from homeassistant.components.mastodon.config_flow import MastodonConfigFlow +from homeassistant.components.mastodon.const import CONF_BASE_URL, DOMAIN from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_ACCESS_TOKEN, CONF_CLIENT_ID, CONF_CLIENT_SECRET from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr from . import setup_integration from tests.common import MockConfigEntry +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry integration.""" + await setup_integration(hass, mock_config_entry) + device_entry = device_registry.async_get_device( + identifiers={(DOMAIN, mock_config_entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot + + async def test_initialization_failure( hass: HomeAssistant, mock_mastodon_client: AsyncMock, @@ -23,3 +44,39 @@ async def test_initialization_failure( await setup_integration(hass, mock_config_entry) assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_migrate( + hass: HomeAssistant, + mock_mastodon_client: AsyncMock, +) -> None: + """Test migration.""" + # Setup the config entry + config_entry = MockConfigEntry( + domain=DOMAIN, + data={ + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + }, + title="@trwnh@mastodon.social", + unique_id="client_id", + version=1, + minor_version=1, + ) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + # Check migration was successful + assert config_entry.state is ConfigEntryState.LOADED + assert config_entry.data == { + CONF_BASE_URL: "https://mastodon.social", + CONF_CLIENT_ID: "client_id", + CONF_CLIENT_SECRET: "client_secret", + CONF_ACCESS_TOKEN: "access_token", + } + assert config_entry.version == MastodonConfigFlow.VERSION + assert config_entry.minor_version == MastodonConfigFlow.MINOR_VERSION + assert config_entry.unique_id == "trwnh_mastodon_social" diff --git a/tests/components/mastodon/test_sensor.py b/tests/components/mastodon/test_sensor.py new file mode 100644 index 
00000000000..343505260e2 --- /dev/null +++ b/tests/components/mastodon/test_sensor.py @@ -0,0 +1,27 @@ +"""Tests for the Mastodon sensors.""" + +from unittest.mock import AsyncMock, patch + +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_sensors( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + entity_registry: er.EntityRegistry, + mock_mastodon_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the sensor entities.""" + with patch("homeassistant.components.mastodon.PLATFORMS", [Platform.SENSOR]): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/matter/conftest.py b/tests/components/matter/conftest.py index f3d8740a73b..b4af00a0b47 100644 --- a/tests/components/matter/conftest.py +++ b/tests/components/matter/conftest.py @@ -3,7 +3,7 @@ from __future__ import annotations import asyncio -from collections.abc import AsyncGenerator, Generator +from collections.abc import AsyncGenerator from unittest.mock import AsyncMock, MagicMock, patch from matter_server.client.models.node import MatterNode @@ -70,153 +70,6 @@ async def integration_fixture( return entry -@pytest.fixture(name="create_backup") -def create_backup_fixture() -> Generator[AsyncMock]: - """Mock Supervisor create backup of add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_create_backup" - ) as create_backup: - yield create_backup - - -@pytest.fixture(name="addon_store_info") -def addon_store_info_fixture() -> Generator[AsyncMock]: - """Mock Supervisor add-on store info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info" - ) as addon_store_info: - addon_store_info.return_value = { - "available": False, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_info") -def addon_info_fixture() -> Generator[AsyncMock]: - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - ) as addon_info: - addon_info.return_value = { - "available": False, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="addon_not_installed") -def addon_not_installed_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on not installed.""" - addon_store_info.return_value["available"] = True - return addon_info - - -@pytest.fixture(name="addon_installed") -def addon_installed_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-matter-server" - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_running") -def addon_running_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on 
already running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["hostname"] = "core-matter-server" - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="install_addon") -def install_addon_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Generator[AsyncMock]: - """Mock install add-on.""" - - async def install_addon_side_effect(hass: HomeAssistant, slug: str) -> None: - """Mock install add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon" - ) as install_addon: - install_addon.side_effect = install_addon_side_effect - yield install_addon - - -@pytest.fixture(name="start_addon") -def start_addon_fixture() -> Generator[AsyncMock]: - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon" - ) as start_addon: - yield start_addon - - -@pytest.fixture(name="stop_addon") -def stop_addon_fixture() -> Generator[AsyncMock]: - """Mock stop add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_stop_addon" - ) as stop_addon: - yield stop_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture() -> Generator[AsyncMock]: - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon - - -@pytest.fixture(name="update_addon") -def update_addon_fixture() -> Generator[AsyncMock]: - """Mock update add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_update_addon" - ) as update_addon: - yield update_addon - - @pytest.fixture(name="door_lock") async def door_lock_fixture( hass: HomeAssistant, matter_client: MagicMock diff --git a/tests/components/matter/test_config_flow.py b/tests/components/matter/test_config_flow.py index 642bfe0f804..a4ddc18802f 100644 --- a/tests/components/matter/test_config_flow.py +++ b/tests/components/matter/test_config_flow.py @@ -4,8 +4,7 @@ from __future__ import annotations from collections.abc import Generator from ipaddress import ip_address -from typing import Any -from unittest.mock import DEFAULT, AsyncMock, MagicMock, call, patch +from unittest.mock import AsyncMock, MagicMock, call, patch from matter_server.client.exceptions import CannotConnect, InvalidServerVersion import pytest @@ -93,20 +92,9 @@ def supervisor_fixture() -> Generator[MagicMock]: yield is_hassio -@pytest.fixture(name="discovery_info") -def discovery_info_fixture() -> Any: - """Return the discovery info from the supervisor.""" - return DEFAULT - - -@pytest.fixture(name="get_addon_discovery_info", autouse=True) -def get_addon_discovery_info_fixture(discovery_info: Any) -> Generator[AsyncMock]: +@pytest.fixture(autouse=True) +def mock_get_addon_discovery_info(get_addon_discovery_info: AsyncMock) -> None: """Mock get add-on discovery info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", - return_value=discovery_info, - ) as get_addon_discovery_info: - yield 
get_addon_discovery_info @pytest.fixture(name="addon_setup_time", autouse=True) diff --git a/tests/components/matter/test_update.py b/tests/components/matter/test_update.py index 73c69407bbc..19c57b0f3c7 100644 --- a/tests/components/matter/test_update.py +++ b/tests/components/matter/test_update.py @@ -5,12 +5,26 @@ from unittest.mock import AsyncMock, MagicMock from chip.clusters import Objects as clusters from chip.clusters.ClusterObjects import ClusterAttributeDescriptor +from freezegun.api import FrozenDateTimeFactory from matter_server.client.models.node import MatterNode +from matter_server.common.errors import UpdateCheckError, UpdateError from matter_server.common.models import MatterSoftwareVersion, UpdateSource import pytest +from homeassistant.components.homeassistant import ( + DOMAIN as HA_DOMAIN, + SERVICE_UPDATE_ENTITY, +) +from homeassistant.components.matter.update import SCAN_INTERVAL +from homeassistant.components.update import ( + ATTR_VERSION, + DOMAIN as UPDATE_DOMAIN, + SERVICE_INSTALL, +) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers.restore_state import STORAGE_KEY as RESTORE_STATE_KEY from homeassistant.setup import async_setup_component from .common import ( @@ -19,6 +33,24 @@ from .common import ( trigger_subscription_callback, ) +from tests.common import ( + async_fire_time_changed, + async_mock_restore_state_shutdown_restart, + mock_restore_cache_with_extra_data, +) + +TEST_SOFTWARE_VERSION = MatterSoftwareVersion( + vid=65521, + pid=32768, + software_version=2, + software_version_string="v2.0", + firmware_information="", + min_applicable_software_version=0, + max_applicable_software_version=1, + release_notes_url="http://home-assistant.io/non-existing-product", + update_source=UpdateSource.LOCAL, +) + def set_node_attribute_typed( node: MatterNode, @@ -34,11 +66,18 @@ def set_node_attribute_typed( @pytest.fixture(name="check_node_update") async def check_node_update_fixture(matter_client: MagicMock) -> AsyncMock: - """Fixture for a flow sensor node.""" + """Fixture to check for node updates.""" matter_client.check_node_update = AsyncMock(return_value=None) return matter_client.check_node_update +@pytest.fixture(name="update_node") +async def update_node_fixture(matter_client: MagicMock) -> AsyncMock: + """Fixture to install update.""" + matter_client.update_node = AsyncMock(return_value=None) + return matter_client.update_node + + @pytest.fixture(name="updateable_node") async def updateable_node_fixture( hass: HomeAssistant, matter_client: MagicMock @@ -63,19 +102,19 @@ async def test_update_entity( assert matter_client.check_node_update.call_count == 1 -async def test_update_install( +async def test_update_check_service( hass: HomeAssistant, matter_client: MagicMock, check_node_update: AsyncMock, updateable_node: MatterNode, ) -> None: - """Test update entity exists and update check got made.""" + """Test check device update through service call.""" state = hass.states.get("update.mock_dimmable_light") assert state assert state.state == STATE_OFF assert state.attributes.get("installed_version") == "v1.0" - await async_setup_component(hass, "homeassistant", {}) + await async_setup_component(hass, HA_DOMAIN, {}) check_node_update.return_value = MatterSoftwareVersion( vid=65521, @@ -90,8 +129,8 @@ async def test_update_install( ) await 
hass.services.async_call( - "homeassistant", - "update_entity", + HA_DOMAIN, + SERVICE_UPDATE_ENTITY, { ATTR_ENTITY_ID: "update.mock_dimmable_light", }, @@ -109,11 +148,50 @@ async def test_update_install( == "http://home-assistant.io/non-existing-product" ) - await async_setup_component(hass, "update", {}) + +async def test_update_install( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + updateable_node: MatterNode, + freezer: FrozenDateTimeFactory, +) -> None: + """Test device update with Matter attribute changes influence progress.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "v1.0" + + check_node_update.return_value = MatterSoftwareVersion( + vid=65521, + pid=32768, + software_version=2, + software_version_string="v2.0", + firmware_information="", + min_applicable_software_version=0, + max_applicable_software_version=1, + release_notes_url="http://home-assistant.io/non-existing-product", + update_source=UpdateSource.LOCAL, + ) + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert matter_client.check_node_update.call_count == 2 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + assert ( + state.attributes.get("release_url") + == "http://home-assistant.io/non-existing-product" + ) await hass.services.async_call( - "update", - "install", + UPDATE_DOMAIN, + SERVICE_INSTALL, { ATTR_ENTITY_ID: "update.mock_dimmable_light", }, @@ -169,3 +247,173 @@ async def test_update_install( state = hass.states.get("update.mock_dimmable_light") assert state.state == STATE_OFF assert state.attributes.get("installed_version") == "v2.0" + + +async def test_update_install_failure( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + update_node: AsyncMock, + updateable_node: MatterNode, + freezer: FrozenDateTimeFactory, +) -> None: + """Test update entity service call errors.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "v1.0" + + check_node_update.return_value = MatterSoftwareVersion( + vid=65521, + pid=32768, + software_version=2, + software_version_string="v2.0", + firmware_information="", + min_applicable_software_version=0, + max_applicable_software_version=1, + release_notes_url="http://home-assistant.io/non-existing-product", + update_source=UpdateSource.LOCAL, + ) + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert matter_client.check_node_update.call_count == 2 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + assert ( + state.attributes.get("release_url") + == "http://home-assistant.io/non-existing-product" + ) + + update_node.side_effect = UpdateCheckError("Error finding applicable update") + + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + ATTR_VERSION: "v3.0", + }, + blocking=True, + ) + + update_node.side_effect = UpdateError("Error updating node") + + with pytest.raises(HomeAssistantError): 
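
The two failure cases in test_update_install_failure expect the update.install service call to abort with HomeAssistantError once the client raises UpdateCheckError or UpdateError. A minimal sketch of the error-translation pattern those assertions exercise; the entity class, the _client/_node_id attributes, the pending-version bookkeeping, and the exact update_node call shape are assumptions for illustration, not the Matter integration's actual code:

from typing import Any

from matter_server.common.errors import UpdateCheckError, UpdateError

from homeassistant.components.update import UpdateEntity
from homeassistant.exceptions import HomeAssistantError


class SketchNodeUpdateEntity(UpdateEntity):
    """Illustrative update entity that surfaces client errors to service calls."""

    def __init__(self, client: Any, node_id: int) -> None:
        """Store the client handle and node id used by async_install."""
        self._client = client
        self._node_id = node_id
        # Integer software version discovered by the last update check; the
        # real entity keeps a full MatterSoftwareVersion object instead.
        self._pending_software_version: int | None = None

    async def async_install(
        self, version: str | None, backup: bool, **kwargs: Any
    ) -> None:
        """Install the pending update, mapping client failures to HomeAssistantError."""
        if self._pending_software_version is None:
            raise HomeAssistantError("No update available to install")
        try:
            # The call signature of update_node is assumed for this sketch.
            await self._client.update_node(
                node_id=self._node_id,
                software_version=self._pending_software_version,
            )
        except (UpdateCheckError, UpdateError) as err:
            # Raising HomeAssistantError is what makes the blocking
            # update.install service call fail, matching the
            # pytest.raises(HomeAssistantError) blocks in the tests.
            raise HomeAssistantError(f"Installing the node update failed: {err}") from err
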
+ await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + ATTR_VERSION: "v3.0", + }, + blocking=True, + ) + + +async def test_update_state_save_and_restore( + hass: HomeAssistant, + hass_storage: dict[str, Any], + matter_client: MagicMock, + check_node_update: AsyncMock, + updateable_node: MatterNode, + freezer: FrozenDateTimeFactory, +) -> None: + """Test latest update information is retained across reload/restart.""" + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_OFF + assert state.attributes.get("installed_version") == "v1.0" + + check_node_update.return_value = TEST_SOFTWARE_VERSION + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + + assert matter_client.check_node_update.call_count == 2 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + await hass.async_block_till_done() + await async_mock_restore_state_shutdown_restart(hass) + + assert len(hass_storage[RESTORE_STATE_KEY]["data"]) == 1 + state = hass_storage[RESTORE_STATE_KEY]["data"][0]["state"] + assert state["entity_id"] == "update.mock_dimmable_light" + extra_data = hass_storage[RESTORE_STATE_KEY]["data"][0]["extra_data"] + + # Check that the extra data has the format we expect. + assert extra_data == { + "software_update": { + "vid": 65521, + "pid": 32768, + "software_version": 2, + "software_version_string": "v2.0", + "firmware_information": "", + "min_applicable_software_version": 0, + "max_applicable_software_version": 1, + "release_notes_url": "http://home-assistant.io/non-existing-product", + "update_source": "local", + } + } + + +async def test_update_state_restore( + hass: HomeAssistant, + matter_client: MagicMock, + check_node_update: AsyncMock, + update_node: AsyncMock, +) -> None: + """Test latest update information extra data is restored.""" + mock_restore_cache_with_extra_data( + hass, + ( + ( + State( + "update.mock_dimmable_light", + STATE_ON, + { + "auto_update": False, + "installed_version": "v1.0", + "in_progress": False, + "latest_version": "v2.0", + }, + ), + {"software_update": TEST_SOFTWARE_VERSION.as_dict()}, + ), + ), + ) + await setup_integration_with_node_fixture(hass, "dimmable-light", matter_client) + + assert check_node_update.call_count == 0 + + state = hass.states.get("update.mock_dimmable_light") + assert state + assert state.state == STATE_ON + assert state.attributes.get("latest_version") == "v2.0" + + await hass.services.async_call( + UPDATE_DOMAIN, + SERVICE_INSTALL, + { + ATTR_ENTITY_ID: "update.mock_dimmable_light", + }, + blocking=True, + ) + + # Validate that the integer software version from the extra data is passed + # to the update_node call. 
+ assert update_node.call_count == 1 + assert ( + update_node.call_args[1]["software_version"] + == TEST_SOFTWARE_VERSION.software_version + ) diff --git a/tests/components/mealie/snapshots/test_diagnostics.ambr b/tests/components/mealie/snapshots/test_diagnostics.ambr index e6c72c950cc..a694c72fcf6 100644 --- a/tests/components/mealie/snapshots/test_diagnostics.ambr +++ b/tests/components/mealie/snapshots/test_diagnostics.ambr @@ -10,6 +10,7 @@ 'description': None, 'entry_type': 'breakfast', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -18,6 +19,7 @@ 'recipe': dict({ 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'JeQ2', 'name': 'Roast Chicken', 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', @@ -35,6 +37,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -43,6 +46,7 @@ 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -58,6 +62,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -66,6 +71,7 @@ 'recipe': dict({ 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'En9o', 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', @@ -81,6 +87,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -89,6 +96,7 @@ 'recipe': dict({ 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. 
These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'Kn62', 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', @@ -104,6 +112,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -112,6 +121,7 @@ 'recipe': dict({ 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'ibL6', 'name': 'Pampered Chef Double Chocolate Mocha Trifle', 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', @@ -127,6 +137,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -135,6 +146,7 @@ 'recipe': dict({ 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! These sliders are quick and easy plus they are make-ahead and reheat really well.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'beGq', 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', @@ -150,6 +162,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -158,6 +171,7 @@ 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -173,6 +187,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -181,6 +196,7 @@ 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. 
Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', @@ -196,6 +212,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -204,6 +221,7 @@ 'recipe': dict({ 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '5G1v', 'name': 'Miso Udon Noodles with Spinach and Tofu', 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', @@ -219,6 +237,7 @@ 'description': None, 'entry_type': 'dinner', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -227,6 +246,7 @@ 'recipe': dict({ 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'rrNL', 'name': 'Mousse de saumon', 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', @@ -242,6 +262,7 @@ 'description': 'Dineren met de boys', 'entry_type': 'dinner', 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-21', @@ -257,6 +278,7 @@ 'description': None, 'entry_type': 'lunch', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -265,6 +287,7 @@ 'recipe': dict({ 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. 
Ingredientes, tiempo de...', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'INQz', 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', @@ -280,6 +303,7 @@ 'description': None, 'entry_type': 'lunch', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -288,6 +312,7 @@ 'recipe': dict({ 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nj5M', 'name': 'Boeuf bourguignon : la vraie recette (2)', 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', @@ -303,6 +328,7 @@ 'description': None, 'entry_type': 'lunch', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-22', @@ -311,6 +337,7 @@ 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -328,6 +355,7 @@ 'description': None, 'entry_type': 'side', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': dict({ '__type': "", 'isoformat': '2024-01-23', @@ -336,6 +364,7 @@ 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', diff --git a/tests/components/mealie/snapshots/test_services.ambr b/tests/components/mealie/snapshots/test_services.ambr index 3ae158f1d2d..4f9ee6a5c09 100644 --- a/tests/components/mealie/snapshots/test_services.ambr +++ b/tests/components/mealie/snapshots/test_services.ambr @@ -5,6 +5,7 @@ 'date_added': datetime.date(2024, 6, 29), 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'household_id': None, 'image': 'SuPW', 'ingredients': list([ dict({ @@ -196,11 +197,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? 
Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -216,11 +219,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 229, 'recipe': dict({ 'description': 'The BEST Roast Chicken recipe is simple, budget friendly, and gives you a tender, mouth-watering chicken full of flavor! Served with roasted vegetables, this recipe is simple enough for any cook!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'JeQ2', 'name': 'Roast Chicken', 'original_url': 'https://tastesbetterfromscratch.com/roast-chicken/', @@ -236,11 +241,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 226, 'recipe': dict({ 'description': 'Te explicamos paso a paso, de manera sencilla, la elaboración de la receta de pollo al curry con leche de coco en 10 minutos. Ingredientes, tiempo de...', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'INQz', 'name': 'Receta de pollo al curry en 10 minutos (con vídeo incluido)', 'original_url': 'https://www.directoalpaladar.com/recetas-de-carnes-y-aves/receta-de-pollo-al-curry-en-10-minutos', @@ -256,11 +263,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 224, 'recipe': dict({ 'description': 'bourguignon, oignon, carotte, bouquet garni, vin rouge, beurre, sel, poivre', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nj5M', 'name': 'Boeuf bourguignon : la vraie recette (2)', 'original_url': 'https://www.marmiton.org/recettes/recette_boeuf-bourguignon_18889.aspx', @@ -276,11 +285,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 222, 'recipe': dict({ 'description': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο από τον Άκη Πετρετζίκη. 
Φτιάξτε την πιο εύκολη μακαρονάδα με κεφτεδάκια σε μόνο ένα σκεύος.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'En9o', 'name': 'Εύκολη μακαρονάδα με κεφτεδάκια στον φούρνο (1)', 'original_url': 'https://akispetretzikis.com/recipe/7959/efkolh-makaronada-me-keftedakia-ston-fourno', @@ -296,11 +307,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 221, 'recipe': dict({ 'description': 'Delicious Greek turkey meatballs with lemon orzo, tender veggies, and a creamy feta yogurt sauce. These healthy baked Greek turkey meatballs are filled with tons of wonderful herbs and make the perfect protein-packed weeknight meal!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'Kn62', 'name': 'Greek Turkey Meatballs with Lemon Orzo & Creamy Feta Yogurt Sauce', 'original_url': 'https://www.ambitiouskitchen.com/greek-turkey-meatballs/', @@ -316,11 +329,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 220, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', @@ -336,11 +351,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 219, 'recipe': dict({ 'description': 'This is a modified Pampered Chef recipe. You can use a trifle bowl or large glass punch/salad bowl to show it off. It is really easy to make and I never have any leftovers. Cook time includes chill time.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'ibL6', 'name': 'Pampered Chef Double Chocolate Mocha Trifle', 'original_url': 'https://www.food.com/recipe/pampered-chef-double-chocolate-mocha-trifle-74963', @@ -356,11 +373,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 217, 'recipe': dict({ 'description': 'Cheeseburger Sliders are juicy, cheesy and beefy - everything we love about classic burgers! 
These sliders are quick and easy plus they are make-ahead and reheat really well.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'beGq', 'name': 'Cheeseburger Sliders (Easy, 30-min Recipe)', 'original_url': 'https://natashaskitchen.com/cheeseburger-sliders/', @@ -376,11 +395,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 216, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -396,11 +417,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 212, 'recipe': dict({ 'description': 'This All-American beef stew recipe includes tender beef coated in a rich, intense sauce and vegetables that bring complementary texture and flavor.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '356X', 'name': 'All-American Beef Stew Recipe', 'original_url': 'https://www.seriouseats.com/all-american-beef-stew-recipe', @@ -416,11 +439,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 211, 'recipe': dict({ 'description': 'Einfacher Nudelauflauf mit Brokkoli, Sahnesauce und extra Käse. Dieses vegetarische 5 Zutaten Rezept ist super schnell gemacht und SO gut!', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'nOPT', 'name': 'Einfacher Nudelauflauf mit Brokkoli', 'original_url': 'https://kochkarussell.com/einfacher-nudelauflauf-brokkoli/', @@ -436,11 +461,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 23), 'mealplan_id': 196, 'recipe': dict({ 'description': 'Simple to prepare and ready in 25 minutes, this vegetarian miso noodle recipe can be eaten on its own or served as a side.', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': '5G1v', 'name': 'Miso Udon Noodles with Spinach and Tofu', 'original_url': 'https://www.allrecipes.com/recipe/284039/miso-udon-noodles-with-spinach-and-tofu/', @@ -456,11 +483,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 22), 'mealplan_id': 195, 'recipe': dict({ 'description': 'Avis aux nostalgiques des années 1980, la mousse de saumon est de retour dans une présentation adaptée au goût du jour. On utilise une technique sans faille : un saumon frais cuit au micro-ondes et mélangé au robot avec du fromage à la crème et de la crème sure. 
On obtient ainsi une texture onctueuse à tartiner, qui n’a rien à envier aux préparations gélatineuses d’antan !', 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'rrNL', 'name': 'Mousse de saumon', 'original_url': 'https://www.ricardocuisine.com/recettes/8919-mousse-de-saumon', @@ -476,6 +505,7 @@ 'description': 'Dineren met de boys', 'entry_type': , 'group_id': '3931df86-0679-4579-8c63-4bedc9ca9a85', + 'household_id': None, 'mealplan_date': FakeDate(2024, 1, 21), 'mealplan_id': 1, 'recipe': None, @@ -491,6 +521,7 @@ 'date_added': datetime.date(2024, 6, 29), 'description': 'The world’s most famous cake, the Original Sacher-Torte, is the consequence of several lucky twists of fate. The first was in 1832, when the Austrian State Chancellor, Prince Klemens Wenzel von Metternich, tasked his kitchen staff with concocting an extraordinary dessert to impress his special guests. As fortune had it, the chef had fallen ill that evening, leaving the apprentice chef, the then-16-year-old Franz Sacher, to perform this culinary magic trick. Metternich’s parting words to the talented teenager: “I hope you won’t disgrace me tonight.”', 'group_id': '24477569-f6af-4b53-9e3f-6d04b0ca6916', + 'household_id': None, 'image': 'SuPW', 'ingredients': list([ dict({ @@ -681,11 +712,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -705,11 +738,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. 
Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', @@ -729,11 +764,13 @@ 'description': None, 'entry_type': , 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'mealplan_date': datetime.date(2024, 1, 22), 'mealplan_id': 230, 'recipe': dict({ 'description': "Een traybake is eigenlijk altijd een goed idee. Deze zoete aardappel curry traybake dus ook. Waarom? Omdat je alleen maar wat groenten - en in dit geval kip - op een bakplaat (traybake dus) legt, hier wat kruiden aan toevoegt en deze in de oven schuift. Ideaal dus als je geen zin hebt om lang in de keuken te staan. Maar gewoon lekker op de bank wil ploffen om te wachten tot de oven klaar is. Joe! That\\'s what we like. Deze zoete aardappel curry traybake bevat behalve zoete aardappel en curry ook kikkererwten, kippendijfilet en bloemkoolroosjes. Je gebruikt yoghurt en limoen als een soort dressing. En je serveert deze heerlijke traybake met naanbrood. Je kunt natuurljk ook voor deze traybake met chipolataworstjes gaan. Wil je graag meer ovengerechten? Dan moet je eigenlijk even kijken naar onze Ovenbijbel. Onmisbaar in je keuken! We willen je deze zoete aardappelstamppot met prei ook niet onthouden. 
Megalekker bordje comfortfood als je \\'t ons vraagt.", 'group_id': '0bf60b2e-ca89-42a9-94d4-8f67ca72b157', + 'household_id': None, 'image': 'AiIo', 'name': 'Zoete aardappel curry traybake', 'original_url': 'https://chickslovefood.com/recept/zoete-aardappel-curry-traybake/', diff --git a/tests/components/mealie/snapshots/test_todo.ambr b/tests/components/mealie/snapshots/test_todo.ambr index a580862535e..4c58a839f57 100644 --- a/tests/components/mealie/snapshots/test_todo.ambr +++ b/tests/components/mealie/snapshots/test_todo.ambr @@ -140,17 +140,3 @@ 'state': '3', }) # --- -# name: test_get_todo_list_items - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Mealie Supermarket', - 'supported_features': , - }), - 'context': , - 'entity_id': 'todo.mealie_supermarket', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '3', - }) -# --- diff --git a/tests/components/mealie/test_config_flow.py b/tests/components/mealie/test_config_flow.py index f2886578744..777d25fdef5 100644 --- a/tests/components/mealie/test_config_flow.py +++ b/tests/components/mealie/test_config_flow.py @@ -6,7 +6,7 @@ from aiomealie import About, MealieAuthenticationError, MealieConnectionError import pytest from homeassistant.components.mealie.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_API_TOKEN, CONF_HOST, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -152,11 +152,7 @@ async def test_reauth_flow( """Test reauth flow.""" await setup_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -179,11 +175,7 @@ async def test_reauth_flow_wrong_account( """Test reauth flow with wrong account.""" await setup_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -218,11 +210,7 @@ async def test_reauth_flow_exceptions( await setup_integration(hass, mock_config_entry) mock_mealie_client.get_user_info.side_effect = exception - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config_entry.entry_id}, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/meater/test_config_flow.py b/tests/components/meater/test_config_flow.py index b8c1be15268..9049cf4ac9a 100644 --- a/tests/components/meater/test_config_flow.py +++ b/tests/components/meater/test_config_flow.py @@ -123,11 +123,7 @@ async def test_reauth_flow(hass: HomeAssistant, mock_meater) -> None: ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=data, - ) + result = 
await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/media_extractor/snapshots/test_init.ambr b/tests/components/media_extractor/snapshots/test_init.ambr index ed56f40af73..9731a415c00 100644 --- a/tests/components/media_extractor/snapshots/test_init.ambr +++ b/tests/components/media_extractor/snapshots/test_init.ambr @@ -30,15 +30,6 @@ 'media_content_type': 'VIDEO', }) # --- -# name: test_play_media_service - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694794256/ei/sC0EZYCPHbuZx_AP3bGz0Ac/ip/84.31.234.146/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr2---sn-5hnekn7k.googlevideo.com/mh/7c/mm/31,29/mn/sn-5hnekn7k,sn-5hne6nzy/ms/au,rdu/mv/m/mvi/2/pl/14/initcwndbps/2267500/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694772337/fvip/3/short_key/1/keepalive/yes/fexp/24007246,24362685/beids/24350018/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRgIhAIC0iobMnRschmQ3QaYsytXg9eg7l9B_-UNvMciis4bmAiEAg-3jr6SwOfAGCCU-JyTyxcXmraug-hPcjjJzm__43ug%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRQIhAOlqbgmuueNhIuGENYKCsdwiNAUPheXw-RMUqsiaB7YuAiANN43FxJl14Ve_H_c9K-aDoXG4sI7PDCqKDhov6Qro_g%3D%3D/playlist/index.m3u8', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-AUDIO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -57,24 +48,6 @@ 'media_content_type': 'AUDIO', }) # --- -# name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-VIDEO-audio_media_extractor_config] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 'https://cf-media.sndcdn.com/50remGX1OqRY.128.mp3?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiKjovL2NmLW1lZGlhLnNuZGNkbi5jb20vNTByZW1HWDFPcVJZLjEyOC5tcDMqIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNjk0Nzk5MTc5fX19XX0_&Signature=JtF8BXxTCElhjCrhnSAq3W6z960VmdVXx7BPhQvI0MCxr~J43JFGO8CVw9-VBM2oEf14mqWo63-C0FO29DvUuBZnmLD3dhDfryVfWJsrix7voimoRDaNFE~3zntDbg7O2S8uWYyZK8OZC9anzwokvjH7jbmviWqK4~2IM9dwgejGgzrQU1aadV2Yro7NJZnF7SD~7tVjkM-hBg~X5zDYVxmGrdzN3tFoLwRmUch6RNDL~1DcWBk0AveBKQFAdBrFBjDDUeIyDz9Idhw2aG9~fjfckcf95KwqrVQxz1N5XEzfNDDo8xkUgDt0eb9dtXdwxLJ0swC6e5VLS8bsH91GMg__&Key-Pair-Id=APKAI6TU7MMXM5DG6EPQ', - 'media_content_type': 'VIDEO', - }) -# --- -# name: test_play_media_service[https://soundcloud.com/bruttoband/brutto-11-VIDEO-empty_media_extractor_config] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 
'https://cf-media.sndcdn.com/50remGX1OqRY.128.mp3?Policy=eyJTdGF0ZW1lbnQiOlt7IlJlc291cmNlIjoiKjovL2NmLW1lZGlhLnNuZGNkbi5jb20vNTByZW1HWDFPcVJZLjEyOC5tcDMqIiwiQ29uZGl0aW9uIjp7IkRhdGVMZXNzVGhhbiI6eyJBV1M6RXBvY2hUaW1lIjoxNjk0Nzk4NTkzfX19XX0_&Signature=flALJvEBnzS0ZOOhf0-07Ap~NURw2Gn2OqkeKKTTMX5HRGJw9eXFay79tcC4GsMMXWUgWoCx-n3yelpyilE2MOEIufBNUbjqRfMSJaX5YhYxjQdoDYuiU~gqBzJyPw9pKzr6P8~5HNKL3Idr0CNhUzdV6FQLaUPKMMibq9ghV833mUmdyvdk1~GZBc8MOg9GrTdcigGgpPzd-vrIMICMvFzFnwBOeOotxX2Vfqf9~wVekBKGlvB9A~7TlZ71lv9Fl9u4m8rse9E-mByweVc1M784ehJV3~tRPjuF~FXXWKP8x0nGJmoq7RAnG7iFIt~fQFmsfOq2o~PG7dHMRPh7hw__&Key-Pair-Id=APKAI6TU7MMXM5DG6EPQ', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://test.com/abc-AUDIO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -93,15 +66,6 @@ 'media_content_type': 'AUDIO', }) # --- -# name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-audio_media_extractor_config-] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694805268/ei/tFgEZcu0DoOD-gaqg47wBA/ip/45.93.75.130/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr3---sn-5hne6nzy.googlevideo.com/mh/7c/mm/31,29/mn/sn-5hne6nzy,sn-5hnekn7k/ms/au,rdu/mv/m/mvi/3/pl/22/initcwndbps/1957500/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694783146/fvip/2/short_key/1/keepalive/yes/fexp/24007246/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRQIhALAASH0_ZDQQoMA82qWNCXSHPZ0bb9TQldIs7AAxktiiAiASA5bQy7IAa6NwdGIOpfye5OgcY_BNuo0WgSdh84tosw%3D%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRgIhAIsDcLGH8KJpQpBgyJ5VWlDxfr75HyO8hMSVS9v7nRu4AiEA2xjtLZOzeNFoJlxwCsH3YqsUQt-BF_4gikhi_P4FbBc%3D/playlist/index.m3u8', - 'media_content_type': 'VIDEO', - }) -# --- # name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-audio_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', @@ -111,15 +75,6 @@ 'media_content_type': 'VIDEO', }) # --- -# name: test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-empty_media_extractor_config-] - ReadOnlyDict({ - 'entity_id': 'media_player.bedroom', - 'extra': dict({ - }), - 'media_content_id': 'https://manifest.googlevideo.com/api/manifest/hls_playlist/expire/1694805294/ei/zlgEZcCPFpqOx_APj42f2Ao/ip/45.93.75.130/id/750c38c3d5a05dc4/itag/616/source/youtube/requiressl/yes/ratebypass/yes/pfa/1/wft/1/sgovp/clen%3D99471214%3Bdur%3D212.040%3Bgir%3Dyes%3Bitag%3D356%3Blmt%3D1694043438471036/hls_chunk_host/rr3---sn-5hne6nzy.googlevideo.com/mh/7c/mm/31,26/mn/sn-5hne6nzy,sn-aigzrnld/ms/au,onr/mv/m/mvi/3/pl/22/initcwndbps/2095000/vprv/1/playlist_type/DVR/dover/13/txp/4532434/mt/1694783390/fvip/1/short_key/1/keepalive/yes/fexp/24007246,24362685/beids/24350017/sparams/expire,ei,ip,id,itag,source,requiressl,ratebypass,pfa,wft,sgovp,vprv,playlist_type/sig/AOq0QJ8wRgIhANCPwWNfq6wBp1Xo1L8bRJpDrzOyv7kfH_J65cZ_PRZLAiEAwo-0wQgeIjPe7OgyAAvMCx_A9wd1h8Qyh7VntKwGJUs%3D/lsparams/hls_chunk_host,mh,mm,mn,ms,mv,mvi,pl,initcwndbps/lsig/AG3C_xAwRQIgIqS9Ub_6L9ScKXr0T9bkeu6TZsEsyNApYfF_MqeukqECIQCMSeJ1sSEw5QGMgHAW8Fhsir4TYHEK5KVg-PzJbrT6hw%3D%3D/playlist/index.m3u8', - 'media_content_type': 'VIDEO', - }) -# --- # name: 
test_play_media_service[https://www.youtube.com/watch?v=dQw4w9WgXcQ-VIDEO-empty_media_extractor_config] ReadOnlyDict({ 'entity_id': 'media_player.bedroom', diff --git a/tests/components/media_player/common.py b/tests/components/media_player/common.py index 77076d903a6..c0cdfbf26d7 100644 --- a/tests/components/media_player/common.py +++ b/tests/components/media_player/common.py @@ -16,6 +16,7 @@ from homeassistant.components.media_player import ( SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, + MediaPlayerEnqueue, ) from homeassistant.const import ( ATTR_ENTITY_ID, @@ -35,70 +36,79 @@ from homeassistant.const import ( SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass -async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn on specified media player or all.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) @bind_hass -def turn_on(hass, entity_id=ENTITY_MATCH_ALL): +def turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn on specified media player or all.""" hass.add_job(async_turn_on, hass, entity_id) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_off( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn off specified media player or all.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) @bind_hass -def turn_off(hass, entity_id=ENTITY_MATCH_ALL): +def turn_off(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn off specified media player or all.""" hass.add_job(async_turn_off, hass, entity_id) -async def async_toggle(hass, entity_id=ENTITY_MATCH_ALL): +async def async_toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Toggle specified media player or all.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data, blocking=True) @bind_hass -def toggle(hass, entity_id=ENTITY_MATCH_ALL): +def toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Toggle specified media player or all.""" hass.add_job(async_toggle, hass, entity_id) -async def async_volume_up(hass, entity_id=ENTITY_MATCH_ALL): +async def async_volume_up( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for volume up.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_VOLUME_UP, data, blocking=True) @bind_hass -def volume_up(hass, entity_id=ENTITY_MATCH_ALL): +def volume_up(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for volume up.""" hass.add_job(async_volume_up, hass, entity_id) -async def async_volume_down(hass, entity_id=ENTITY_MATCH_ALL): +async def async_volume_down( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for volume down.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_VOLUME_DOWN, data, blocking=True) @bind_hass -def volume_down(hass, entity_id=ENTITY_MATCH_ALL): +def volume_down(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the 
media player the command for volume down.""" hass.add_job(async_volume_down, hass, entity_id) -async def async_mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL): +async def async_mute_volume( + hass: HomeAssistant, mute: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for muting the volume.""" data = {ATTR_MEDIA_VOLUME_MUTED: mute} @@ -109,12 +119,16 @@ async def async_mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL): @bind_hass -def mute_volume(hass, mute, entity_id=ENTITY_MATCH_ALL): +def mute_volume( + hass: HomeAssistant, mute: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for muting the volume.""" hass.add_job(async_mute_volume, hass, mute, entity_id) -async def async_set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL): +async def async_set_volume_level( + hass: HomeAssistant, volume: float, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for setting the volume.""" data = {ATTR_MEDIA_VOLUME_LEVEL: volume} @@ -125,12 +139,16 @@ async def async_set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL): @bind_hass -def set_volume_level(hass, volume, entity_id=ENTITY_MATCH_ALL): +def set_volume_level( + hass: HomeAssistant, volume: float, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for setting the volume.""" hass.add_job(async_set_volume_level, hass, volume, entity_id) -async def async_media_play_pause(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_play_pause( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for play/pause.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call( @@ -139,48 +157,56 @@ async def async_media_play_pause(hass, entity_id=ENTITY_MATCH_ALL): @bind_hass -def media_play_pause(hass, entity_id=ENTITY_MATCH_ALL): +def media_play_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for play/pause.""" hass.add_job(async_media_play_pause, hass, entity_id) -async def async_media_play(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_play( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for play/pause.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_MEDIA_PLAY, data, blocking=True) @bind_hass -def media_play(hass, entity_id=ENTITY_MATCH_ALL): +def media_play(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for play/pause.""" hass.add_job(async_media_play, hass, entity_id) -async def async_media_pause(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_pause( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for pause.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_MEDIA_PAUSE, data, blocking=True) @bind_hass -def media_pause(hass, entity_id=ENTITY_MATCH_ALL): +def media_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for pause.""" hass.add_job(async_media_pause, hass, entity_id) -async def async_media_stop(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_stop( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for stop.""" data = 
{ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_MEDIA_STOP, data, blocking=True) @bind_hass -def media_stop(hass, entity_id=ENTITY_MATCH_ALL): +def media_stop(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for stop.""" hass.add_job(async_media_stop, hass, entity_id) -async def async_media_next_track(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_next_track( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for next track.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call( @@ -189,12 +215,14 @@ async def async_media_next_track(hass, entity_id=ENTITY_MATCH_ALL): @bind_hass -def media_next_track(hass, entity_id=ENTITY_MATCH_ALL): +def media_next_track(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for next track.""" hass.add_job(async_media_next_track, hass, entity_id) -async def async_media_previous_track(hass, entity_id=ENTITY_MATCH_ALL): +async def async_media_previous_track( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for prev track.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call( @@ -203,12 +231,16 @@ async def async_media_previous_track(hass, entity_id=ENTITY_MATCH_ALL): @bind_hass -def media_previous_track(hass, entity_id=ENTITY_MATCH_ALL): +def media_previous_track( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for prev track.""" hass.add_job(async_media_previous_track, hass, entity_id) -async def async_media_seek(hass, position, entity_id=ENTITY_MATCH_ALL): +async def async_media_seek( + hass: HomeAssistant, position: float, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command to seek in current playing media.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} data[ATTR_MEDIA_SEEK_POSITION] = position @@ -216,14 +248,20 @@ async def async_media_seek(hass, position, entity_id=ENTITY_MATCH_ALL): @bind_hass -def media_seek(hass, position, entity_id=ENTITY_MATCH_ALL): +def media_seek( + hass: HomeAssistant, position: float, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command to seek in current playing media.""" hass.add_job(async_media_seek, hass, position, entity_id) async def async_play_media( - hass, media_type, media_id, entity_id=ENTITY_MATCH_ALL, enqueue=None -): + hass: HomeAssistant, + media_type: str, + media_id: str, + entity_id: str = ENTITY_MATCH_ALL, + enqueue: MediaPlayerEnqueue | bool | None = None, +) -> None: """Send the media player the command for playing media.""" data = {ATTR_MEDIA_CONTENT_TYPE: media_type, ATTR_MEDIA_CONTENT_ID: media_id} @@ -237,12 +275,20 @@ async def async_play_media( @bind_hass -def play_media(hass, media_type, media_id, entity_id=ENTITY_MATCH_ALL, enqueue=None): +def play_media( + hass: HomeAssistant, + media_type: str, + media_id: str, + entity_id: str = ENTITY_MATCH_ALL, + enqueue: MediaPlayerEnqueue | bool | None = None, +) -> None: """Send the media player the command for playing media.""" hass.add_job(async_play_media, hass, media_type, media_id, entity_id, enqueue) -async def async_select_source(hass, source, entity_id=ENTITY_MATCH_ALL): +async def async_select_source( + hass: HomeAssistant, source: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: 
"""Send the media player the command to select input source.""" data = {ATTR_INPUT_SOURCE: source} @@ -253,18 +299,22 @@ async def async_select_source(hass, source, entity_id=ENTITY_MATCH_ALL): @bind_hass -def select_source(hass, source, entity_id=ENTITY_MATCH_ALL): +def select_source( + hass: HomeAssistant, source: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command to select input source.""" hass.add_job(async_select_source, hass, source, entity_id) -async def async_clear_playlist(hass, entity_id=ENTITY_MATCH_ALL): +async def async_clear_playlist( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Send the media player the command for clear playlist.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} await hass.services.async_call(DOMAIN, SERVICE_CLEAR_PLAYLIST, data, blocking=True) @bind_hass -def clear_playlist(hass, entity_id=ENTITY_MATCH_ALL): +def clear_playlist(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Send the media player the command for clear playlist.""" hass.add_job(async_clear_playlist, hass, entity_id) diff --git a/tests/components/media_player/test_async_helpers.py b/tests/components/media_player/test_async_helpers.py index 783846d8857..750d2861f21 100644 --- a/tests/components/media_player/test_async_helpers.py +++ b/tests/components/media_player/test_async_helpers.py @@ -17,7 +17,7 @@ from homeassistant.core import HomeAssistant class SimpleMediaPlayer(mp.MediaPlayerEntity): """Media player test class.""" - def __init__(self, hass): + def __init__(self, hass: HomeAssistant) -> None: """Initialize the test media player.""" self.hass = hass self._volume = 0 diff --git a/tests/components/melcloud/test_config_flow.py b/tests/components/melcloud/test_config_flow.py index c1c6c10ac4c..74b16aab6ed 100644 --- a/tests/components/melcloud/test_config_flow.py +++ b/tests/components/melcloud/test_config_flow.py @@ -9,7 +9,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.melcloud.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE +from homeassistant.config_entries import SOURCE_RECONFIGURE from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -166,15 +166,7 @@ async def test_token_reauthentication( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -212,15 +204,7 @@ async def test_form_errors_reauthentication( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) with patch( "homeassistant.components.melcloud.async_setup_entry", @@ -270,15 +254,7 @@ async def test_client_errors_reauthentication( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await 
mock_entry.start_reauth_flow(hass) with patch( "homeassistant.components.melcloud.async_setup_entry", diff --git a/tests/components/met_eireann/snapshots/test_weather.ambr b/tests/components/met_eireann/snapshots/test_weather.ambr index 90f36d09d25..de8b69de18a 100644 --- a/tests/components/met_eireann/snapshots/test_weather.ambr +++ b/tests/components/met_eireann/snapshots/test_weather.ambr @@ -1,104 +1,4 @@ # serializer version: 1 -# name: test_forecast_service - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- -# name: test_forecast_service.1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- -# name: test_forecast_service[forecast] - dict({ - 'weather.somewhere': dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[forecast].1 - dict({ - 'weather.somewhere': dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }), - }) -# --- -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-08T12:00:00+00:00', - 'temperature': 10.0, - }), - dict({ - 'condition': 'lightning-rainy', - 'datetime': '2023-08-09T12:00:00+00:00', - 'temperature': 20.0, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.somewhere': dict({ diff --git a/tests/components/metoffice/snapshots/test_weather.ambr b/tests/components/metoffice/snapshots/test_weather.ambr index a6991a8631b..0bbc0e06a0a 100644 --- a/tests/components/metoffice/snapshots/test_weather.ambr +++ b/tests/components/metoffice/snapshots/test_weather.ambr @@ -1,658 +1,4 @@ # serializer version: 1 -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 13.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 
'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-25T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 19.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T18:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 17.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 14.0, - 'wind_bearing': 'NW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T00:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 13.0, - 'wind_bearing': 'WSW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T03:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T09:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T15:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T18:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T00:00:00+00:00', - 'precipitation_probability': 11, - 'temperature': 9.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T03:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 8.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T06:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 8.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 4, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T18:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 
'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-27T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T00:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 8.0, - 'wind_bearing': 'NNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 7.0, - 'wind_bearing': 'W', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-28T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 6.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-28T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T15:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T18:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NNE', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T00:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'E', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-29T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 8.0, - 'wind_bearing': 'SSE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T06:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 8.0, - 'wind_bearing': 'SE', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T09:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 10.0, - 'wind_bearing': 'SE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 47, - 'temperature': 12.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'pouring', - 'datetime': '2020-04-29T15:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T18:00:00+00:00', - 'precipitation_probability': 39, - 'temperature': 12.0, - 'wind_bearing': 'SSE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T21:00:00+00:00', - 'precipitation_probability': 19, - 'temperature': 11.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 13.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 
'condition': 'partlycloudy', - 'datetime': '2020-04-27T12:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].3 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-25T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 19.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T18:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 17.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-25T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 14.0, - 'wind_bearing': 'NW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T00:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 13.0, - 'wind_bearing': 'WSW', - 'wind_speed': 3.22, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-26T03:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T09:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T12:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 12.0, - 'wind_bearing': 'WNW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T15:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 12.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T18:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 11.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-26T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T00:00:00+00:00', - 'precipitation_probability': 11, - 'temperature': 9.0, - 'wind_bearing': 'WNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T03:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 8.0, - 'wind_bearing': 'WNW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-27T06:00:00+00:00', - 'precipitation_probability': 14, - 'temperature': 8.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-27T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': 
'2020-04-27T12:00:00+00:00', - 'precipitation_probability': 4, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T15:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-27T18:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 10.0, - 'wind_bearing': 'NW', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-27T21:00:00+00:00', - 'precipitation_probability': 1, - 'temperature': 9.0, - 'wind_bearing': 'NW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T00:00:00+00:00', - 'precipitation_probability': 2, - 'temperature': 8.0, - 'wind_bearing': 'NNW', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2020-04-28T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 7.0, - 'wind_bearing': 'W', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2020-04-28T06:00:00+00:00', - 'precipitation_probability': 5, - 'temperature': 6.0, - 'wind_bearing': 'S', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-28T09:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T12:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'ENE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T15:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 12.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T18:00:00+00:00', - 'precipitation_probability': 10, - 'temperature': 11.0, - 'wind_bearing': 'N', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-28T21:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 10.0, - 'wind_bearing': 'NNE', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T00:00:00+00:00', - 'precipitation_probability': 6, - 'temperature': 9.0, - 'wind_bearing': 'E', - 'wind_speed': 6.44, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2020-04-29T03:00:00+00:00', - 'precipitation_probability': 3, - 'temperature': 8.0, - 'wind_bearing': 'SSE', - 'wind_speed': 11.27, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T06:00:00+00:00', - 'precipitation_probability': 9, - 'temperature': 8.0, - 'wind_bearing': 'SE', - 'wind_speed': 14.48, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2020-04-29T09:00:00+00:00', - 'precipitation_probability': 12, - 'temperature': 10.0, - 'wind_bearing': 'SE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T12:00:00+00:00', - 'precipitation_probability': 47, - 'temperature': 12.0, - 'wind_bearing': 'SE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'pouring', - 'datetime': '2020-04-29T15:00:00+00:00', - 'precipitation_probability': 59, - 'temperature': 13.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2020-04-29T18:00:00+00:00', - 'precipitation_probability': 39, - 'temperature': 12.0, - 'wind_bearing': 'SSE', - 'wind_speed': 17.7, - }), - dict({ - 'condition': 'cloudy', 
- 'datetime': '2020-04-29T21:00:00+00:00', - 'precipitation_probability': 19, - 'temperature': 11.0, - 'wind_bearing': 'SSE', - 'wind_speed': 20.92, - }), - ]), - }) -# --- -# name: test_forecast_service[get_forecast].4 - dict({ - 'forecast': list([ - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.met_office_wavertree_daily': dict({ diff --git a/tests/components/mfi/test_sensor.py b/tests/components/mfi/test_sensor.py index 49efdd5dc71..37512ca78f8 100644 --- a/tests/components/mfi/test_sensor.py +++ b/tests/components/mfi/test_sensor.py @@ -116,13 +116,13 @@ async def test_setup_adds_proper_devices(hass: HomeAssistant) -> None: @pytest.fixture(name="port") -def port_fixture(): +def port_fixture() -> mock.MagicMock: """Port fixture.""" return mock.MagicMock() @pytest.fixture(name="sensor") -def sensor_fixture(hass, port): +def sensor_fixture(hass: HomeAssistant, port: mock.MagicMock) -> mfi.MfiSensor: """Sensor fixture.""" sensor = mfi.MfiSensor(port, hass) sensor.hass = hass diff --git a/tests/components/microbees/test_config_flow.py b/tests/components/microbees/test_config_flow.py index d168dcd5017..f4e074d000d 100644 --- a/tests/components/microbees/test_config_flow.py +++ b/tests/components/microbees/test_config_flow.py @@ -6,7 +6,7 @@ from microBeesPy import MicroBeesException import pytest from homeassistant.components.microbees.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -144,14 +144,7 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -205,14 +198,7 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, config_entry) microbees.return_value.getMyProfile.return_value.id = 12345 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/microsoft/test_tts.py b/tests/components/microsoft/test_tts.py index dca760230ac..0f11501843e 100644 --- a/tests/components/microsoft/test_tts.py +++ b/tests/components/microsoft/test_tts.py @@ -20,9 +20,8 @@ from tests.typing import ClientSessionGenerator @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture(autouse=True) diff --git a/tests/components/microsoft_face/test_init.py b/tests/components/microsoft_face/test_init.py index 63014a095c0..0819dd82f21 100644 --- a/tests/components/microsoft_face/test_init.py +++ b/tests/components/microsoft_face/test_init.py @@ -31,7 +31,7 @@ async def 
setup_homeassistant(hass: HomeAssistant): await async_setup_component(hass, "homeassistant", {}) -def create_group(hass, name): +def create_group(hass: HomeAssistant, name: str) -> None: """Create a new person group. This is a legacy helper method. Do not use it for new tests. @@ -40,7 +40,7 @@ def create_group(hass, name): hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_CREATE_GROUP, data)) -def delete_group(hass, name): +def delete_group(hass: HomeAssistant, name: str) -> None: """Delete a person group. This is a legacy helper method. Do not use it for new tests. @@ -49,7 +49,7 @@ def delete_group(hass, name): hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_DELETE_GROUP, data)) -def train_group(hass, group): +def train_group(hass: HomeAssistant, group: str) -> None: """Train a person group. This is a legacy helper method. Do not use it for new tests. @@ -58,7 +58,7 @@ def train_group(hass, group): hass.async_create_task(hass.services.async_call(DOMAIN, SERVICE_TRAIN_GROUP, data)) -def create_person(hass, group, name): +def create_person(hass: HomeAssistant, group: str, name: str) -> None: """Create a person in a group. This is a legacy helper method. Do not use it for new tests. @@ -69,7 +69,7 @@ def create_person(hass, group, name): ) -def delete_person(hass, group, name): +def delete_person(hass: HomeAssistant, group: str, name: str) -> None: """Delete a person in a group. This is a legacy helper method. Do not use it for new tests. @@ -80,7 +80,9 @@ def delete_person(hass, group, name): ) -def face_person(hass, group, person, camera_entity): +def face_person( + hass: HomeAssistant, group: str, person: str, camera_entity: str +) -> None: """Add a new face picture to a person. This is a legacy helper method. Do not use it for new tests. 
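Editorial sketch (not part of the upstream diff): the reauth-test hunks above and below all converge on the same pattern, replacing the manual `hass.config_entries.flow.async_init(..., context={"source": SOURCE_REAUTH, "entry_id": ...})` boilerplate with the shared `MockConfigEntry.start_reauth_flow(hass)` helper. A minimal example of the resulting test style is shown here; `test_reauth_flow` is a generic stand-in name, and a per-integration setup step would normally precede it, as in the files above.

from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from tests.common import MockConfigEntry


async def test_reauth_flow(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> None:
    """Start a reauth flow through the shared MockConfigEntry helper."""
    mock_config_entry.add_to_hass(hass)

    # One helper call replaces the context={"source": SOURCE_REAUTH, ...} setup.
    result = await mock_config_entry.start_reauth_flow(hass)

    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "reauth_confirm"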
diff --git a/tests/components/mikrotik/test_config_flow.py b/tests/components/mikrotik/test_config_flow.py index f34fde0c9a5..d95a6488fc7 100644 --- a/tests/components/mikrotik/test_config_flow.py +++ b/tests/components/mikrotik/test_config_flow.py @@ -175,14 +175,7 @@ async def test_reauth_success(hass: HomeAssistant, api) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=DEMO_USER_INPUT, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -207,14 +200,7 @@ async def test_reauth_failed(hass: HomeAssistant, auth_error) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=DEMO_USER_INPUT, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -240,14 +226,7 @@ async def test_reauth_failed_conn_error(hass: HomeAssistant, conn_error) -> None ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=DEMO_USER_INPUT, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/mobile_app/conftest.py b/tests/components/mobile_app/conftest.py index 9f0681d41f7..53e90cb61ae 100644 --- a/tests/components/mobile_app/conftest.py +++ b/tests/components/mobile_app/conftest.py @@ -1,6 +1,7 @@ """Tests for mobile_app component.""" from http import HTTPStatus +from typing import Any from aiohttp.test_utils import TestClient import pytest @@ -15,7 +16,9 @@ from tests.typing import ClientSessionGenerator @pytest.fixture -async def create_registrations(hass, webhook_client): +async def create_registrations( + hass: HomeAssistant, webhook_client: TestClient +) -> tuple[dict[str, Any], dict[str, Any]]: """Return two new registrations.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) @@ -37,7 +40,7 @@ async def create_registrations(hass, webhook_client): @pytest.fixture -async def push_registration(hass, webhook_client): +async def push_registration(hass: HomeAssistant, webhook_client: TestClient): """Return registration with push notifications enabled.""" await async_setup_component(hass, DOMAIN, {DOMAIN: {}}) diff --git a/tests/components/mobile_app/test_binary_sensor.py b/tests/components/mobile_app/test_binary_sensor.py index acebd8796b7..9ffb61f92ab 100644 --- a/tests/components/mobile_app/test_binary_sensor.py +++ b/tests/components/mobile_app/test_binary_sensor.py @@ -1,7 +1,9 @@ """Entity tests for mobile_app.""" from http import HTTPStatus +from typing import Any +from aiohttp.test_utils import TestClient import pytest from homeassistant.const import STATE_UNKNOWN @@ -12,8 +14,8 @@ from homeassistant.helpers import device_registry as dr async def test_sensor( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be registered and updated.""" webhook_id = create_registrations[1]["webhook_id"] @@ -98,7 +100,9 @@ async def 
test_sensor( async def test_sensor_must_register( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors must be registered before updating.""" webhook_id = create_registrations[1]["webhook_id"] @@ -122,8 +126,8 @@ async def test_sensor_must_register( async def test_sensor_id_no_dupes( hass: HomeAssistant, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test that a duplicate unique ID in registration updates the sensor.""" @@ -185,7 +189,9 @@ async def test_sensor_id_no_dupes( async def test_register_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be registered, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -244,7 +250,9 @@ async def test_register_sensor_no_state( async def test_update_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be updated, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] diff --git a/tests/components/mobile_app/test_device_tracker.py b/tests/components/mobile_app/test_device_tracker.py index e3e2ce3227a..d1cbc21c36b 100644 --- a/tests/components/mobile_app/test_device_tracker.py +++ b/tests/components/mobile_app/test_device_tracker.py @@ -1,12 +1,17 @@ """Test mobile app device tracker.""" from http import HTTPStatus +from typing import Any + +from aiohttp.test_utils import TestClient from homeassistant.core import HomeAssistant async def test_sending_location( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( @@ -76,7 +81,9 @@ async def test_sending_location( async def test_restoring_location( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test sending a location via a webhook.""" resp = await webhook_client.post( diff --git a/tests/components/mobile_app/test_sensor.py b/tests/components/mobile_app/test_sensor.py index a7fb0ffc183..6411274fc4e 100644 --- a/tests/components/mobile_app/test_sensor.py +++ b/tests/components/mobile_app/test_sensor.py @@ -1,8 +1,10 @@ """Entity tests for mobile_app.""" from http import HTTPStatus +from typing import Any from unittest.mock import patch +from aiohttp.test_utils import TestClient import pytest from homeassistant.components.sensor import SensorDeviceClass @@ -14,7 +16,11 @@ from homeassistant.const import ( ) from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er -from homeassistant.util.unit_system import METRIC_SYSTEM, US_CUSTOMARY_SYSTEM +from homeassistant.util.unit_system import ( + METRIC_SYSTEM, + US_CUSTOMARY_SYSTEM, + UnitSystem, +) @pytest.mark.parametrize( @@ -28,12 +34,12 @@ 
async def test_sensor( hass: HomeAssistant, device_registry: dr.DeviceRegistry, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, - unit_system, - state_unit, - state1, - state2, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + unit_system: UnitSystem, + state_unit: UnitOfTemperature, + state1: str, + state2: str, ) -> None: """Test that sensors can be registered and updated.""" hass.config.units = unit_system @@ -149,13 +155,13 @@ async def test_sensor( ) async def test_sensor_migration( hass: HomeAssistant, - create_registrations, - webhook_client, - unique_id, - unit_system, - state_unit, - state1, - state2, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + unique_id: str, + unit_system: UnitSystem, + state_unit: UnitOfTemperature, + state1: str, + state2: str, ) -> None: """Test migration to RestoreSensor.""" hass.config.units = unit_system @@ -243,7 +249,9 @@ async def test_sensor_migration( async def test_sensor_must_register( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors must be registered before updating.""" webhook_id = create_registrations[1]["webhook_id"] @@ -265,8 +273,8 @@ async def test_sensor_must_register( async def test_sensor_id_no_dupes( hass: HomeAssistant, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, caplog: pytest.LogCaptureFixture, ) -> None: """Test that a duplicate unique ID in registration updates the sensor.""" @@ -331,7 +339,9 @@ async def test_sensor_id_no_dupes( async def test_register_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be registered, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -390,7 +400,9 @@ async def test_register_sensor_no_state( async def test_update_sensor_no_state( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be updated, when there is no (unknown) state.""" webhook_id = create_registrations[1]["webhook_id"] @@ -464,11 +476,11 @@ async def test_update_sensor_no_state( ) async def test_sensor_datetime( hass: HomeAssistant, - create_registrations, - webhook_client, - device_class, - native_value, - state_value, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + device_class: SensorDeviceClass, + native_value: str, + state_value: str, ) -> None: """Test that sensors can be registered and updated.""" webhook_id = create_registrations[1]["webhook_id"] @@ -505,8 +517,8 @@ async def test_sensor_datetime( async def test_default_disabling_entity( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors can be disabled by default upon registration.""" webhook_id = create_registrations[1]["webhook_id"] @@ -543,8 +555,8 @@ async def test_default_disabling_entity( async def 
test_updating_disabled_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that sensors return error if disabled in instance.""" webhook_id = create_registrations[1]["webhook_id"] diff --git a/tests/components/mobile_app/test_webhook.py b/tests/components/mobile_app/test_webhook.py index 77798c57f10..61e342a45ce 100644 --- a/tests/components/mobile_app/test_webhook.py +++ b/tests/components/mobile_app/test_webhook.py @@ -1,10 +1,13 @@ """Webhook tests for mobile_app.""" from binascii import unhexlify +from collections.abc import Callable from http import HTTPStatus import json +from typing import Any from unittest.mock import ANY, patch +from aiohttp.test_utils import TestClient from nacl.encoding import Base64Encoder from nacl.secret import SecretBox import pytest @@ -31,7 +34,7 @@ from tests.components.conversation import MockAgent @pytest.fixture -async def homeassistant(hass): +async def homeassistant(hass: HomeAssistant) -> None: """Load the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) @@ -93,7 +96,8 @@ def decrypt_payload_legacy(secret_key, encrypted_data): async def test_webhook_handle_render_template( - create_registrations, webhook_client + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we render templates properly.""" resp = await webhook_client.post( @@ -121,7 +125,9 @@ async def test_webhook_handle_render_template( async def test_webhook_handle_call_services( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we call services properly.""" calls = async_mock_service(hass, "test", "mobile_app") @@ -137,7 +143,9 @@ async def test_webhook_handle_call_services( async def test_webhook_handle_fire_event( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can fire events.""" events = [] @@ -161,7 +169,7 @@ async def test_webhook_handle_fire_event( assert events[0].data["hello"] == "yo world" -async def test_webhook_update_registration(webhook_client) -> None: +async def test_webhook_update_registration(webhook_client: TestClient) -> None: """Test that a we can update an existing registration via webhook.""" register_resp = await webhook_client.post( "/api/mobile_app/registrations", json=REGISTER_CLEARTEXT @@ -186,7 +194,9 @@ async def test_webhook_update_registration(webhook_client) -> None: async def test_webhook_handle_get_zones( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can get zones properly.""" # Zone is already loaded as part of the fixture, @@ -238,7 +248,9 @@ async def test_webhook_handle_get_zones( async def test_webhook_handle_get_config( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can get config properly.""" webhook_id = create_registrations[1]["webhook_id"] @@ -299,7 +311,9 @@ async def 
test_webhook_handle_get_config( async def test_webhook_returns_error_incorrect_json( - webhook_client, create_registrations, caplog: pytest.LogCaptureFixture + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that an error is returned when JSON is invalid.""" resp = await webhook_client.post( @@ -323,7 +337,11 @@ async def test_webhook_returns_error_incorrect_json( ], ) async def test_webhook_handle_decryption( - hass: HomeAssistant, webhook_client, create_registrations, msg, generate_response + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + msg: dict[str, Any], + generate_response: Callable[[HomeAssistant], dict[str, Any]], ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -346,7 +364,8 @@ async def test_webhook_handle_decryption( async def test_webhook_handle_decryption_legacy( - webhook_client, create_registrations + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -369,7 +388,9 @@ async def test_webhook_handle_decryption_legacy( async def test_webhook_handle_decryption_fail( - webhook_client, create_registrations, caplog: pytest.LogCaptureFixture + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -412,7 +433,9 @@ async def test_webhook_handle_decryption_fail( async def test_webhook_handle_decryption_legacy_fail( - webhook_client, create_registrations, caplog: pytest.LogCaptureFixture + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, + caplog: pytest.LogCaptureFixture, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -455,7 +478,8 @@ async def test_webhook_handle_decryption_legacy_fail( async def test_webhook_handle_decryption_legacy_upgrade( - webhook_client, create_registrations + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can encrypt/decrypt properly.""" key = create_registrations[0]["secret"] @@ -510,7 +534,8 @@ async def test_webhook_handle_decryption_legacy_upgrade( async def test_webhook_requires_encryption( - webhook_client, create_registrations + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that encrypted registrations only accept encrypted data.""" resp = await webhook_client.post( @@ -527,7 +552,9 @@ async def test_webhook_requires_encryption( async def test_webhook_update_location_without_locations( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" @@ -564,7 +591,9 @@ async def test_webhook_update_location_without_locations( async def test_webhook_update_location_with_gps( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( @@ 
-586,7 +615,9 @@ async def test_webhook_update_location_with_gps( async def test_webhook_update_location_with_gps_without_accuracy( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" resp = await webhook_client.post( @@ -604,7 +635,9 @@ async def test_webhook_update_location_with_gps_without_accuracy( async def test_webhook_update_location_with_location_name( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that location can be updated.""" @@ -666,7 +699,9 @@ async def test_webhook_update_location_with_location_name( async def test_webhook_enable_encryption( - hass: HomeAssistant, webhook_client, create_registrations + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that encryption can be added to a reg initially created without.""" webhook_id = create_registrations[1]["webhook_id"] @@ -717,7 +752,9 @@ async def test_webhook_enable_encryption( async def test_webhook_camera_stream_non_existent( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for a non-existent camera.""" webhook_id = create_registrations[1]["webhook_id"] @@ -736,7 +773,9 @@ async def test_webhook_camera_stream_non_existent( async def test_webhook_camera_stream_non_hls( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for a non-HLS/stream-supporting camera.""" hass.states.async_set("camera.non_stream_camera", "idle", {"supported_features": 0}) @@ -761,7 +800,9 @@ async def test_webhook_camera_stream_non_hls( async def test_webhook_camera_stream_stream_available( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for an HLS/stream-supporting camera.""" hass.states.async_set( @@ -791,7 +832,9 @@ async def test_webhook_camera_stream_stream_available( async def test_webhook_camera_stream_stream_available_but_errors( - hass: HomeAssistant, create_registrations, webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test fetching camera stream URLs for an HLS/stream-supporting camera but that streaming errors.""" hass.states.async_set( @@ -823,8 +866,8 @@ async def test_webhook_camera_stream_stream_available_but_errors( async def test_webhook_handle_scan_tag( hass: HomeAssistant, device_registry: dr.DeviceRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can scan tags.""" device = device_registry.async_get_device(identifiers={(DOMAIN, "mock-device-id")}) @@ -847,7 +890,9 @@ async def test_webhook_handle_scan_tag( async def test_register_sensor_limits_state_class( - hass: HomeAssistant, create_registrations, 
webhook_client + hass: HomeAssistant, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we limit state classes to sensors only.""" webhook_id = create_registrations[1]["webhook_id"] @@ -890,8 +935,8 @@ async def test_register_sensor_limits_state_class( async def test_reregister_sensor( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can add more info in re-registration.""" webhook_id = create_registrations[1]["webhook_id"] @@ -992,11 +1037,11 @@ async def test_reregister_sensor( assert entry.original_icon is None +@pytest.mark.usefixtures("homeassistant") async def test_webhook_handle_conversation_process( hass: HomeAssistant, - homeassistant, - create_registrations, - webhook_client, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, mock_conversation_agent: MockAgent, ) -> None: """Test that we can converse.""" @@ -1042,9 +1087,8 @@ async def test_webhook_handle_conversation_process( async def test_sending_sensor_state( hass: HomeAssistant, entity_registry: er.EntityRegistry, - create_registrations, - webhook_client, - caplog: pytest.LogCaptureFixture, + create_registrations: tuple[dict[str, Any], dict[str, Any]], + webhook_client: TestClient, ) -> None: """Test that we can register and send sensor state as number and None.""" webhook_id = create_registrations[1]["webhook_id"] diff --git a/tests/components/mochad/test_light.py b/tests/components/mochad/test_light.py index 872bd3a9d61..49beebbaec6 100644 --- a/tests/components/mochad/test_light.py +++ b/tests/components/mochad/test_light.py @@ -18,7 +18,7 @@ def pymochad_mock(): @pytest.fixture -def light_mock(hass, brightness): +def light_mock(hass: HomeAssistant, brightness: int) -> mochad.MochadLight: """Mock light.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_light", "brightness_levels": brightness} diff --git a/tests/components/mochad/test_switch.py b/tests/components/mochad/test_switch.py index 750dd48296e..9fea3b5c14c 100644 --- a/tests/components/mochad/test_switch.py +++ b/tests/components/mochad/test_switch.py @@ -21,7 +21,7 @@ def pymochad_mock(): @pytest.fixture -def switch_mock(hass): +def switch_mock(hass: HomeAssistant) -> mochad.MochadSwitch: """Mock switch.""" controller_mock = mock.MagicMock() dev_dict = {"address": "a1", "name": "fake_switch"} diff --git a/tests/components/modbus/conftest.py b/tests/components/modbus/conftest.py index 6741504585a..5c612f9f8ad 100644 --- a/tests/components/modbus/conftest.py +++ b/tests/components/modbus/conftest.py @@ -37,7 +37,7 @@ TEST_PORT_SERIAL = "usb01" class ReadResult: """Storage class for register read results.""" - def __init__(self, register_words): + def __init__(self, register_words) -> None: """Init.""" self.registers = register_words self.bits = register_words @@ -192,7 +192,9 @@ async def mock_test_state_fixture( @pytest.fixture(name="mock_modbus_ha") -async def mock_modbus_ha_fixture(hass, mock_modbus): +async def mock_modbus_ha_fixture( + hass: HomeAssistant, mock_modbus: mock.AsyncMock +) -> mock.AsyncMock: """Load homeassistant to allow service calls.""" assert await async_setup_component(hass, "homeassistant", {}) await hass.async_block_till_done() diff --git a/tests/components/modbus/test_sensor.py b/tests/components/modbus/test_sensor.py index 
20ff558fce6..87015fa634c 100644 --- a/tests/components/modbus/test_sensor.py +++ b/tests/components/modbus/test_sensor.py @@ -1335,7 +1335,7 @@ async def test_wrap_sensor(hass: HomeAssistant, mock_do_cycle, expected) -> None @pytest.fixture(name="mock_restore") -async def mock_restore(hass): +async def mock_restore(hass: HomeAssistant) -> None: """Mock restore cache.""" mock_restore_cache_with_extra_data( hass, diff --git a/tests/components/modern_forms/snapshots/test_diagnostics.ambr b/tests/components/modern_forms/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..56e299aa12a --- /dev/null +++ b/tests/components/modern_forms/snapshots/test_diagnostics.ambr @@ -0,0 +1,50 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'config_entry': dict({ + 'data': dict({ + 'host': '192.168.1.123', + 'mac': '**REDACTED**', + }), + 'disabled_by': None, + 'domain': 'modern_forms', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 1, + }), + 'device': dict({ + 'info': dict({ + 'client_id': 'MF_000000000000', + 'device_name': 'ModernFormsFan', + 'fan_motor_type': 'DC125X25', + 'fan_type': '1818-56', + 'federated_identity': 'us-east-1:f3da237b-c19c-4f61-b387-0e6dde2e470b', + 'firmware_url': '', + 'firmware_version': '01.03.0025', + 'light_type': 'F6IN-120V-R1-30', + 'mac_address': '**REDACTED**', + 'main_mcu_firmware_version': '01.03.3008', + 'owner': '**REDACTED**', + 'product_sku': '', + 'production_lot_number': '', + }), + 'status': dict({ + 'adaptive_learning_enabled': False, + 'away_mode_enabled': False, + 'fan_direction': 'forward', + 'fan_on': True, + 'fan_sleep_timer': 0, + 'fan_speed': 3, + 'light_brightness': 50, + 'light_on': True, + 'light_sleep_timer': 0, + }), + }), + }) +# --- diff --git a/tests/components/modern_forms/test_diagnostics.py b/tests/components/modern_forms/test_diagnostics.py new file mode 100644 index 00000000000..9eb2e4efa94 --- /dev/null +++ b/tests/components/modern_forms/test_diagnostics.py @@ -0,0 +1,26 @@ +"""Tests for the Modern Forms diagnostics platform.""" + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . 
import init_integration + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test the Modern Forms config entry diagnostics.""" + entry = await init_integration(hass, aioclient_mock) + + result = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert result == snapshot(exclude=props("created_at", "modified_at", "entry_id")) diff --git a/tests/components/monoprice/test_media_player.py b/tests/components/monoprice/test_media_player.py index f7d88692cf5..7d05003153d 100644 --- a/tests/components/monoprice/test_media_player.py +++ b/tests/components/monoprice/test_media_player.py @@ -1,6 +1,7 @@ """The tests for Monoprice Media player platform.""" from collections import defaultdict +from typing import Any from unittest.mock import patch from serial import SerialException @@ -58,7 +59,7 @@ class AttrDict(dict): class MockMonoprice: """Mock for pymonoprice object.""" - def __init__(self): + def __init__(self) -> None: """Init mock object.""" self.zones = defaultdict( lambda: AttrDict(power=True, volume=0, mute=True, source=1) @@ -105,7 +106,7 @@ async def test_cannot_connect(hass: HomeAssistant) -> None: assert hass.states.get(ZONE_1_ID) is None -async def _setup_monoprice(hass, monoprice): +async def _setup_monoprice(hass: HomeAssistant, monoprice: MockMonoprice) -> None: with patch( "homeassistant.components.monoprice.get_monoprice", new=lambda *a: monoprice, @@ -116,7 +117,9 @@ async def _setup_monoprice(hass, monoprice): await hass.async_block_till_done() -async def _setup_monoprice_with_options(hass, monoprice): +async def _setup_monoprice_with_options( + hass: HomeAssistant, monoprice: MockMonoprice +) -> None: with patch( "homeassistant.components.monoprice.get_monoprice", new=lambda *a: monoprice, @@ -129,7 +132,9 @@ async def _setup_monoprice_with_options(hass, monoprice): await hass.async_block_till_done() -async def _setup_monoprice_not_first_run(hass, monoprice): +async def _setup_monoprice_not_first_run( + hass: HomeAssistant, monoprice: MockMonoprice +) -> None: with patch( "homeassistant.components.monoprice.get_monoprice", new=lambda *a: monoprice, @@ -141,19 +146,17 @@ async def _setup_monoprice_not_first_run(hass, monoprice): await hass.async_block_till_done() -async def _call_media_player_service(hass, name, data): +async def _call_media_player_service( + hass: HomeAssistant, name: str, data: dict[str, Any] +) -> None: await hass.services.async_call( MEDIA_PLAYER_DOMAIN, name, service_data=data, blocking=True ) -async def _call_homeassistant_service(hass, name, data): - await hass.services.async_call( - "homeassistant", name, service_data=data, blocking=True - ) - - -async def _call_monoprice_service(hass, name, data): +async def _call_monoprice_service( + hass: HomeAssistant, name: str, data: dict[str, Any] +) -> None: await hass.services.async_call(DOMAIN, name, service_data=data, blocking=True) diff --git a/tests/components/monzo/test_config_flow.py b/tests/components/monzo/test_config_flow.py index b7d0de9cdc3..63daa2bfb43 100644 --- a/tests/components/monzo/test_config_flow.py +++ b/tests/components/monzo/test_config_flow.py @@ -154,14 +154,7 @@ async def test_config_reauth_profile( """Test reauth an existing profile 
reauthenticates the config entry.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -223,14 +216,7 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/motionblinds_ble/__init__.py b/tests/components/motionblinds_ble/__init__.py index c2385555dbf..e1caef9f51f 100644 --- a/tests/components/motionblinds_ble/__init__.py +++ b/tests/components/motionblinds_ble/__init__.py @@ -1 +1,16 @@ """Tests for the Motionblinds Bluetooth integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Mock a fully setup config entry.""" + + mock_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/motionblinds_ble/conftest.py b/tests/components/motionblinds_ble/conftest.py index 00db23734dd..f89cf4f305d 100644 --- a/tests/components/motionblinds_ble/conftest.py +++ b/tests/components/motionblinds_ble/conftest.py @@ -3,21 +3,140 @@ from collections.abc import Generator from unittest.mock import AsyncMock, Mock, patch +from motionblindsble.const import MotionBlindType import pytest -TEST_MAC = "abcd" -TEST_NAME = f"MOTION_{TEST_MAC.upper()}" -TEST_ADDRESS = "test_adress" +from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak +from homeassistant.components.motionblinds_ble.const import ( + CONF_BLIND_TYPE, + CONF_LOCAL_NAME, + CONF_MAC_CODE, + DOMAIN, +) +from homeassistant.const import CONF_ADDRESS + +from tests.common import MockConfigEntry +from tests.components.bluetooth import generate_advertisement_data, generate_ble_device -@pytest.fixture(name="motionblinds_ble_connect", autouse=True) -def motion_blinds_connect_fixture( - enable_bluetooth: None, +@pytest.fixture +def address() -> str: + """Address fixture.""" + return "cc:cc:cc:cc:cc:cc" + + +@pytest.fixture +def mac_code(address: str) -> str: + """MAC code fixture.""" + return "".join(address.split(":")[-3:-1]).upper() + + +@pytest.fixture +def display_name(mac_code: str) -> str: + """Display name fixture.""" + return f"Motionblind {mac_code.upper()}" + + +@pytest.fixture +def name(display_name: str) -> str: + """Name fixture.""" + return display_name.lower().replace(" ", "_") + + +@pytest.fixture +def local_name(mac_code: str) -> str: + """Local name fixture.""" + return f"MOTION_{mac_code.upper()}" + + +@pytest.fixture +def blind_type() -> MotionBlindType: + """Blind type fixture.""" + return MotionBlindType.ROLLER + + +@pytest.fixture +def service_info(local_name: str, address: str) -> BluetoothServiceInfoBleak: + """Service info fixture.""" + return 
BluetoothServiceInfoBleak( + name=local_name, + address=address, + device=generate_ble_device( + address=address, + name=local_name, + ), + rssi=-61, + manufacturer_data={000: b"test"}, + service_data={ + "test": bytearray(b"0000"), + }, + service_uuids=[ + "test", + ], + source="local", + advertisement=generate_advertisement_data( + manufacturer_data={000: b"test"}, + service_uuids=["test"], + ), + connectable=True, + time=0, + tx_power=-127, + ) + + +@pytest.fixture +def mock_motion_device( + blind_type: MotionBlindType, display_name: str +) -> Generator[AsyncMock]: + """Mock a MotionDevice.""" + + with patch( + "homeassistant.components.motionblinds_ble.MotionDevice", + autospec=True, + ) as mock_device: + device = mock_device.return_value + device.ble_device = Mock() + device.display_name = display_name + device.blind_type = blind_type + yield device + + +@pytest.fixture +def mock_config_entry( + blind_type: MotionBlindType, address: str, display_name: str, mac_code: str +) -> MockConfigEntry: + """Config entry fixture.""" + return MockConfigEntry( + title="mock_title", + domain=DOMAIN, + unique_id=address, + data={ + CONF_ADDRESS: address, + CONF_LOCAL_NAME: display_name, + CONF_MAC_CODE: mac_code, + CONF_BLIND_TYPE: blind_type.name.lower(), + }, + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.motionblinds_ble.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def motionblinds_ble_connect( + enable_bluetooth: None, local_name: str, address: str ) -> Generator[tuple[AsyncMock, Mock]]: """Mock motion blinds ble connection and entry setup.""" device = Mock() - device.name = TEST_NAME - device.address = TEST_ADDRESS + device.name = local_name + device.address = address bleak_scanner = AsyncMock() bleak_scanner.discover.return_value = [device] @@ -31,9 +150,5 @@ def motion_blinds_connect_fixture( "homeassistant.components.motionblinds_ble.config_flow.bluetooth.async_get_scanner", return_value=bleak_scanner, ), - patch( - "homeassistant.components.motionblinds_ble.async_setup_entry", - return_value=True, - ), ): yield bleak_scanner, device diff --git a/tests/components/motionblinds_ble/test_button.py b/tests/components/motionblinds_ble/test_button.py new file mode 100644 index 00000000000..9c27056c929 --- /dev/null +++ b/tests/components/motionblinds_ble/test_button.py @@ -0,0 +1,47 @@ +"""Tests for Motionblinds BLE buttons.""" + +from unittest.mock import Mock + +import pytest + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.components.motionblinds_ble.const import ( + ATTR_CONNECT, + ATTR_DISCONNECT, + ATTR_FAVORITE, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize( + ("button"), + [ + ATTR_CONNECT, + ATTR_DISCONNECT, + ATTR_FAVORITE, + ], +) +async def test_button( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + button: str, +) -> None: + """Test states of the button.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + BUTTON_DOMAIN, + SERVICE_PRESS, + {ATTR_ENTITY_ID: f"button.{name}_{button}"}, + blocking=True, + ) + getattr(mock_motion_device, button).assert_called_once() diff --git a/tests/components/motionblinds_ble/test_config_flow.py b/tests/components/motionblinds_ble/test_config_flow.py index 4cab12269dd..05d3077ceb1 100644 --- a/tests/components/motionblinds_ble/test_config_flow.py +++ b/tests/components/motionblinds_ble/test_config_flow.py @@ -12,41 +12,19 @@ from homeassistant.const import CONF_ADDRESS from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from .conftest import TEST_ADDRESS, TEST_MAC, TEST_NAME - from tests.common import MockConfigEntry -from tests.components.bluetooth import generate_advertisement_data, generate_ble_device - -TEST_BLIND_TYPE = MotionBlindType.ROLLER.name.lower() - -BLIND_SERVICE_INFO = BluetoothServiceInfoBleak( - name=TEST_NAME, - address=TEST_ADDRESS, - device=generate_ble_device( - address="cc:cc:cc:cc:cc:cc", - name=TEST_NAME, - ), - rssi=-61, - manufacturer_data={000: b"test"}, - service_data={ - "test": bytearray(b"0000"), - }, - service_uuids=[ - "test", - ], - source="local", - advertisement=generate_advertisement_data( - manufacturer_data={000: b"test"}, - service_uuids=["test"], - ), - connectable=True, - time=0, - tx_power=-127, -) @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_manual_success(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_setup_entry") +async def test_config_flow_manual_success( + hass: HomeAssistant, + blind_type: MotionBlindType, + mac_code: str, + address: str, + local_name: str, + display_name: str, +) -> None: """Successful flow manually initialized by the user.""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -57,28 +35,36 @@ async def test_config_flow_manual_success(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None: 
+@pytest.mark.usefixtures("mock_setup_entry") +async def test_config_flow_manual_error_invalid_mac( + hass: HomeAssistant, + mac_code: str, + address: str, + local_name: str, + display_name: str, + blind_type: MotionBlindType, +) -> None: """Invalid MAC code error flow manually initialized by the user.""" # Initialize @@ -101,7 +87,7 @@ async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None # Recover result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" @@ -109,15 +95,15 @@ async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None # Finish flow result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} @@ -125,6 +111,7 @@ async def test_config_flow_manual_error_invalid_mac(hass: HomeAssistant) -> None @pytest.mark.usefixtures("motionblinds_ble_connect") async def test_config_flow_manual_error_no_bluetooth_adapter( hass: HomeAssistant, + mac_code: str, ) -> None: """No Bluetooth adapter error flow manually initialized by the user.""" @@ -153,14 +140,21 @@ async def test_config_flow_manual_error_no_bluetooth_adapter( ): result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == const.ERROR_NO_BLUETOOTH_ADAPTER +@pytest.mark.usefixtures("mock_setup_entry") async def test_config_flow_manual_error_could_not_find_motor( - hass: HomeAssistant, motionblinds_ble_connect: tuple[AsyncMock, Mock] + hass: HomeAssistant, + motionblinds_ble_connect: tuple[AsyncMock, Mock], + mac_code: str, + local_name: str, + display_name: str, + address: str, + blind_type: MotionBlindType, ) -> None: """Could not find motor error flow manually initialized by the user.""" @@ -176,17 +170,17 @@ async def test_config_flow_manual_error_could_not_find_motor( motionblinds_ble_connect[1].name = "WRONG_NAME" result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" assert result["errors"] == {"base": const.ERROR_COULD_NOT_FIND_MOTOR} # Recover - motionblinds_ble_connect[1].name = TEST_NAME + motionblinds_ble_connect[1].name = local_name result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "confirm" @@ -194,21 +188,23 @@ async def test_config_flow_manual_error_could_not_find_motor( # Finish flow result = await hass.config_entries.flow.async_configure( result["flow_id"], - 
{const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} async def test_config_flow_manual_error_no_devices_found( - hass: HomeAssistant, motionblinds_ble_connect: tuple[AsyncMock, Mock] + hass: HomeAssistant, + motionblinds_ble_connect: tuple[AsyncMock, Mock], + mac_code: str, ) -> None: """No devices found error flow manually initialized by the user.""" @@ -224,19 +220,27 @@ async def test_config_flow_manual_error_no_devices_found( motionblinds_ble_connect[0].discover.return_value = [] result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_MAC_CODE: TEST_MAC}, + {const.CONF_MAC_CODE: mac_code}, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == const.ERROR_NO_DEVICES_FOUND @pytest.mark.usefixtures("motionblinds_ble_connect") -async def test_config_flow_bluetooth_success(hass: HomeAssistant) -> None: +async def test_config_flow_bluetooth_success( + hass: HomeAssistant, + mac_code: str, + service_info: BluetoothServiceInfoBleak, + address: str, + local_name: str, + display_name: str, + blind_type: MotionBlindType, +) -> None: """Successful bluetooth discovery flow.""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_BLUETOOTH}, - data=BLIND_SERVICE_INFO, + data=service_info, ) assert result["type"] is FlowResultType.FORM @@ -244,36 +248,32 @@ async def test_config_flow_bluetooth_success(hass: HomeAssistant) -> None: result = await hass.config_entries.flow.async_configure( result["flow_id"], - {const.CONF_BLIND_TYPE: MotionBlindType.ROLLER.name.lower()}, + {const.CONF_BLIND_TYPE: blind_type.name.lower()}, ) assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == f"Motionblind {TEST_MAC.upper()}" + assert result["title"] == display_name assert result["data"] == { - CONF_ADDRESS: TEST_ADDRESS, - const.CONF_LOCAL_NAME: TEST_NAME, - const.CONF_MAC_CODE: TEST_MAC.upper(), - const.CONF_BLIND_TYPE: TEST_BLIND_TYPE, + CONF_ADDRESS: address, + const.CONF_LOCAL_NAME: local_name, + const.CONF_MAC_CODE: mac_code, + const.CONF_BLIND_TYPE: blind_type.name.lower(), } assert result["options"] == {} -async def test_options_flow(hass: HomeAssistant) -> None: +@pytest.mark.usefixtures("mock_setup_entry") +async def test_options_flow( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: """Test the options flow.""" - entry = MockConfigEntry( - domain=const.DOMAIN, - unique_id="0123456789", - data={ - const.CONF_BLIND_TYPE: MotionBlindType.ROLLER, - }, - ) - entry.add_to_hass(hass) - - await hass.config_entries.async_setup(entry.entry_id) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.options.async_init(entry.entry_id) + result = await hass.config_entries.options.async_init(mock_config_entry.entry_id) assert result["type"] is FlowResultType.FORM assert 
result["step_id"] == "init" diff --git a/tests/components/motionblinds_ble/test_cover.py b/tests/components/motionblinds_ble/test_cover.py new file mode 100644 index 00000000000..2f6b33b3017 --- /dev/null +++ b/tests/components/motionblinds_ble/test_cover.py @@ -0,0 +1,127 @@ +"""Tests for Motionblinds BLE covers.""" + +from typing import Any +from unittest.mock import Mock + +from motionblindsble.const import MotionBlindType, MotionRunningType +import pytest + +from homeassistant.components.cover import ( + ATTR_POSITION, + ATTR_TILT_POSITION, + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_CLOSE_COVER_TILT, + SERVICE_OPEN_COVER, + SERVICE_OPEN_COVER_TILT, + SERVICE_SET_COVER_POSITION, + SERVICE_SET_COVER_TILT_POSITION, + SERVICE_STOP_COVER, + SERVICE_STOP_COVER_TILT, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, +) +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize("blind_type", [MotionBlindType.VENETIAN]) +@pytest.mark.parametrize( + ("service", "method", "kwargs"), + [ + (SERVICE_OPEN_COVER, "open", {}), + (SERVICE_CLOSE_COVER, "close", {}), + (SERVICE_OPEN_COVER_TILT, "open_tilt", {}), + (SERVICE_CLOSE_COVER_TILT, "close_tilt", {}), + (SERVICE_SET_COVER_POSITION, "position", {ATTR_POSITION: 5}), + (SERVICE_SET_COVER_TILT_POSITION, "tilt", {ATTR_TILT_POSITION: 10}), + (SERVICE_STOP_COVER, "stop", {}), + (SERVICE_STOP_COVER_TILT, "stop", {}), + ], +) +async def test_cover_service( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + service: str, + method: str, + kwargs: dict[str, Any], +) -> None: + """Test cover service.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + COVER_DOMAIN, + service, + {ATTR_ENTITY_ID: f"cover.{name}", **kwargs}, + blocking=True, + ) + getattr(mock_motion_device, method).assert_called_once() + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize( + ("running_type", "state"), + [ + (None, "unknown"), + (MotionRunningType.STILL, "unknown"), + (MotionRunningType.OPENING, STATE_OPENING), + (MotionRunningType.CLOSING, STATE_CLOSING), + ], +) +async def test_cover_update_running( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + running_type: str | None, + state: str, +) -> None: + """Test updating running status.""" + + await setup_integration(hass, mock_config_entry) + + async_update_running = mock_motion_device.register_running_callback.call_args[0][0] + + async_update_running(running_type) + assert hass.states.get(f"cover.{name}").state == state + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize( + ("position", "tilt", "state"), + [ + (None, None, "unknown"), + (0, 0, STATE_OPEN), + (50, 90, STATE_OPEN), + (100, 180, STATE_CLOSED), + ], +) +async def test_cover_update_position( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + position: int, + tilt: int, + state: str, +) -> None: + """Test updating cover position and tilt.""" + + await setup_integration(hass, mock_config_entry) + + async_update_position = mock_motion_device.register_position_callback.call_args[0][ + 0 + ] + + async_update_position(position, tilt) + assert hass.states.get(f"cover.{name}").state == state 
diff --git a/tests/components/motionblinds_ble/test_entity.py b/tests/components/motionblinds_ble/test_entity.py new file mode 100644 index 00000000000..00369ba1e22 --- /dev/null +++ b/tests/components/motionblinds_ble/test_entity.py @@ -0,0 +1,55 @@ +"""Tests for Motionblinds BLE entities.""" + +from unittest.mock import Mock + +import pytest + +from homeassistant.components.homeassistant import ( + DOMAIN as HA_DOMAIN, + SERVICE_UPDATE_ENTITY, +) +from homeassistant.components.motionblinds_ble.const import ( + ATTR_CONNECT, + ATTR_DISCONNECT, + ATTR_FAVORITE, + ATTR_SPEED, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("motionblinds_ble_connect") +@pytest.mark.parametrize( + ("platform", "entity"), + [ + (Platform.BUTTON, ATTR_CONNECT), + (Platform.BUTTON, ATTR_DISCONNECT), + (Platform.BUTTON, ATTR_FAVORITE), + (Platform.SELECT, ATTR_SPEED), + ], +) +async def test_entity_update( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + platform: Platform, + entity: str, +) -> None: + """Test updating entity using homeassistant.update_entity.""" + + await async_setup_component(hass, HA_DOMAIN, {}) + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + HA_DOMAIN, + SERVICE_UPDATE_ENTITY, + {ATTR_ENTITY_ID: f"{platform.name.lower()}.{name}_{entity}"}, + blocking=True, + ) + getattr(mock_motion_device, "status_query").assert_called_once_with() diff --git a/tests/components/motionblinds_ble/test_init.py b/tests/components/motionblinds_ble/test_init.py new file mode 100644 index 00000000000..09596bd8d5e --- /dev/null +++ b/tests/components/motionblinds_ble/test_init.py @@ -0,0 +1,49 @@ +"""Tests for Motionblinds BLE init.""" + +from unittest.mock import patch + +from homeassistant.components.bluetooth.models import BluetoothServiceInfoBleak +from homeassistant.components.motionblinds_ble import options_update_listener +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry +from tests.components.bluetooth import inject_bluetooth_service_info + + +async def test_options_update_listener( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, +) -> None: + """Test options_update_listener.""" + + await setup_integration(hass, mock_config_entry) + + with ( + patch( + "homeassistant.components.motionblinds_ble.MotionDevice.set_custom_disconnect_time" + ) as mock_set_custom_disconnect_time, + patch( + "homeassistant.components.motionblinds_ble.MotionDevice.set_permanent_connection" + ) as set_permanent_connection, + ): + await options_update_listener(hass, mock_config_entry) + mock_set_custom_disconnect_time.assert_called_once() + set_permanent_connection.assert_called_once() + + +async def test_update_ble_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + service_info: BluetoothServiceInfoBleak, +) -> None: + """Test async_update_ble_device.""" + + await setup_integration(hass, mock_config_entry) + + with patch( + "homeassistant.components.motionblinds_ble.MotionDevice.set_ble_device" + ) as mock_set_ble_device: + inject_bluetooth_service_info(hass, service_info) + mock_set_ble_device.assert_called_once() diff --git a/tests/components/motionblinds_ble/test_select.py b/tests/components/motionblinds_ble/test_select.py new file mode 100644 index 00000000000..2bd1bb30ec2 --- /dev/null +++ b/tests/components/motionblinds_ble/test_select.py @@ -0,0 +1,76 @@ +"""Tests for Motionblinds BLE selects.""" + +from collections.abc import Callable +from enum import Enum +from typing import Any +from unittest.mock import Mock + +from motionblindsble.const import MotionSpeedLevel +from motionblindsble.device import MotionDevice +import pytest + +from homeassistant.components.motionblinds_ble.const import ATTR_SPEED +from homeassistant.components.select import ( + DOMAIN as SELECT_DOMAIN, + SERVICE_SELECT_OPTION, +) +from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.parametrize(("select", "args"), [(ATTR_SPEED, MotionSpeedLevel.HIGH)]) +async def test_select( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + select: str, + args: Any, +) -> None: + """Test select.""" + + await setup_integration(hass, mock_config_entry) + + await hass.services.async_call( + SELECT_DOMAIN, + SERVICE_SELECT_OPTION, + { + ATTR_ENTITY_ID: f"select.{name}_{select}", + ATTR_OPTION: MotionSpeedLevel.HIGH.value, + }, + blocking=True, + ) + getattr(mock_motion_device, select).assert_called_once_with(args) + + +@pytest.mark.parametrize( + ("select", "register_callback", "value"), + [ + ( + ATTR_SPEED, + lambda device: device.register_speed_callback, + MotionSpeedLevel.HIGH, + ) + ], +) +async def test_select_update( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + select: str, + register_callback: Callable[[MotionDevice], Callable[..., None]], + value: type[Enum], +) -> None: + """Test select state update.""" + + await setup_integration(hass, mock_config_entry) + + update_func = register_callback(mock_motion_device).call_args[0][0] + + update_func(value) + assert hass.states.get(f"select.{name}_{select}").state == str(value.value) diff --git a/tests/components/motionblinds_ble/test_sensor.py b/tests/components/motionblinds_ble/test_sensor.py new file mode 100644 index 00000000000..c2468b876ae --- /dev/null +++ b/tests/components/motionblinds_ble/test_sensor.py @@ -0,0 +1,108 @@ +"""Tests for Motionblinds BLE sensors.""" + +from collections.abc import Callable +from typing import Any +from unittest.mock import Mock + +from motionblindsble.const import ( + MotionBlindType, + MotionCalibrationType, + MotionConnectionType, +) +from motionblindsble.device import MotionDevice +import pytest + +from homeassistant.components.motionblinds_ble.const import ( + ATTR_BATTERY, + ATTR_SIGNAL_STRENGTH, +) +from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN +from homeassistant.core import HomeAssistant + +from . 
import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("blind_type", [MotionBlindType.CURTAIN]) +@pytest.mark.parametrize( + ("sensor", "register_callback", "initial_value", "args", "expected_value"), + [ + ( + "connection_status", + lambda device: device.register_connection_callback, + MotionConnectionType.DISCONNECTED.value, + [MotionConnectionType.CONNECTING], + MotionConnectionType.CONNECTING.value, + ), + ( + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [25, True, False], + "25", + ), + ( # Battery unknown + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [None, False, False], + "unknown", + ), + ( # Wired + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [255, False, True], + "255", + ), + ( # Almost full + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [99, False, False], + "99", + ), + ( # Almost empty + ATTR_BATTERY, + lambda device: device.register_battery_callback, + "unknown", + [1, False, False], + "1", + ), + ( + "calibration_status", + lambda device: device.register_calibration_callback, + "unknown", + [MotionCalibrationType.CALIBRATING], + MotionCalibrationType.CALIBRATING.value, + ), + ( + ATTR_SIGNAL_STRENGTH, + lambda device: device.register_signal_strength_callback, + "unknown", + [-50], + "-50", + ), + ], +) +async def test_sensor( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_motion_device: Mock, + name: str, + sensor: str, + register_callback: Callable[[MotionDevice], Callable[..., None]], + initial_value: str, + args: list[Any], + expected_value: str, +) -> None: + """Test sensors.""" + + await setup_integration(hass, mock_config_entry) + + assert hass.states.get(f"{SENSOR_DOMAIN}.{name}_{sensor}").state == initial_value + update_func = register_callback(mock_motion_device).call_args[0][0] + update_func(*args) + assert hass.states.get(f"{SENSOR_DOMAIN}.{name}_{sensor}").state == expected_value diff --git a/tests/components/motioneye/test_config_flow.py b/tests/components/motioneye/test_config_flow.py index 816fb31933a..d2ec91b08e3 100644 --- a/tests/components/motioneye/test_config_flow.py +++ b/tests/components/motioneye/test_config_flow.py @@ -264,14 +264,7 @@ async def test_reauth(hass: HomeAssistant) -> None: config_entry = create_mock_motioneye_config_entry(hass, data=config_data) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert not result["errors"] diff --git a/tests/components/mqtt/test_client.py b/tests/components/mqtt/test_client.py index cd02d805e1c..dcded7d187a 100644 --- a/tests/components/mqtt/test_client.py +++ b/tests/components/mqtt/test_client.py @@ -13,6 +13,7 @@ import pytest from homeassistant.components import mqtt from homeassistant.components.mqtt.client import RECONNECT_INTERVAL_SECONDS +from homeassistant.components.mqtt.const import SUPPORTED_COMPONENTS from homeassistant.components.mqtt.models import MessageCallbackType, ReceiveMessage from homeassistant.config_entries import ConfigEntryDisabler, ConfigEntryState from homeassistant.const import ( @@ -225,7 +226,7 @@ async def test_publish( async def test_convert_outgoing_payload(hass: 
HomeAssistant) -> None: """Test the converting of outgoing MQTT payloads without template.""" - command_template = mqtt.MqttCommandTemplate(None, hass=hass) + command_template = mqtt.MqttCommandTemplate(None) assert command_template.async_render(b"\xde\xad\xbe\xef") == b"\xde\xad\xbe\xef" assert ( command_template.async_render("b'\\xde\\xad\\xbe\\xef'") @@ -1614,8 +1615,9 @@ async def test_subscription_done_when_birth_message_is_sent( """Test sending birth message until initial subscription has been completed.""" mqtt_client_mock = setup_with_birth_msg_client_mock subscribe_calls = help_all_subscribe_calls(mqtt_client_mock) - assert ("homeassistant/+/+/config", 0) in subscribe_calls - assert ("homeassistant/+/+/+/config", 0) in subscribe_calls + for component in SUPPORTED_COMPONENTS: + assert (f"homeassistant/{component}/+/config", 0) in subscribe_calls + assert (f"homeassistant/{component}/+/+/config", 0) in subscribe_calls mqtt_client_mock.publish.assert_called_with( "homeassistant/status", "online", 0, False ) diff --git a/tests/components/mqtt/test_common.py b/tests/components/mqtt/test_common.py index 8d457d9da85..c135c29ebc5 100644 --- a/tests/components/mqtt/test_common.py +++ b/tests/components/mqtt/test_common.py @@ -16,7 +16,10 @@ import yaml from homeassistant import config as module_hass_config from homeassistant.components import mqtt from homeassistant.components.mqtt import debug_info -from homeassistant.components.mqtt.const import MQTT_CONNECTION_STATE +from homeassistant.components.mqtt.const import ( + MQTT_CONNECTION_STATE, + SUPPORTED_COMPONENTS, +) from homeassistant.components.mqtt.mixins import MQTT_ATTRIBUTES_BLOCKED from homeassistant.components.mqtt.models import PublishPayloadType from homeassistant.config_entries import ConfigEntryState @@ -42,6 +45,7 @@ DEFAULT_CONFIG_DEVICE_INFO_ID = { "manufacturer": "Whatever", "name": "Beer", "model": "Glass", + "model_id": "XYZ001", "hw_version": "rev1", "serial_number": "1234deadbeef", "sw_version": "0.1-beta", @@ -54,6 +58,7 @@ DEFAULT_CONFIG_DEVICE_INFO_MAC = { "manufacturer": "Whatever", "name": "Beer", "model": "Glass", + "model_id": "XYZ001", "hw_version": "rev1", "serial_number": "1234deadbeef", "sw_version": "0.1-beta", @@ -73,9 +78,12 @@ type _StateDataType = list[tuple[_MqttMessageType, str | None, _AttributesType | def help_all_subscribe_calls(mqtt_client_mock: MqttMockPahoClient) -> list[Any]: """Test of a call.""" all_calls = [] - for calls in mqtt_client_mock.subscribe.mock_calls: - for call in calls[1]: - all_calls.extend(call) + for call_l1 in mqtt_client_mock.subscribe.mock_calls: + if isinstance(call_l1[1][0], list): + for call_l2 in call_l1[1]: + all_calls.extend(call_l2) + else: + all_calls.append(call_l1[1]) return all_calls @@ -999,6 +1007,7 @@ async def help_test_entity_device_info_with_identifier( assert device.manufacturer == "Whatever" assert device.name == "Beer" assert device.model == "Glass" + assert device.model_id == "XYZ001" assert device.hw_version == "rev1" assert device.sw_version == "0.1-beta" assert device.suggested_area == "default_area" @@ -1035,6 +1044,7 @@ async def help_test_entity_device_info_with_connection( assert device.manufacturer == "Whatever" assert device.name == "Beer" assert device.model == "Glass" + assert device.model_id == "XYZ001" assert device.hw_version == "rev1" assert device.sw_version == "0.1-beta" assert device.suggested_area == "default_area" @@ -1174,7 +1184,10 @@ async def help_test_entity_id_update_subscriptions( state = hass.states.get(f"{domain}.test") 
assert state is not None - assert mqtt_mock.async_subscribe.call_count == len(topics) + 2 + DISCOVERY_COUNT + assert ( + mqtt_mock.async_subscribe.call_count + == len(topics) + 2 * len(SUPPORTED_COMPONENTS) + DISCOVERY_COUNT + ) for topic in topics: mqtt_mock.async_subscribe.assert_any_call( topic, ANY, ANY, ANY, HassJobType.Callback diff --git a/tests/components/mqtt/test_config_flow.py b/tests/components/mqtt/test_config_flow.py index 2b4cb20ccf9..d2f399899b1 100644 --- a/tests/components/mqtt/test_config_flow.py +++ b/tests/components/mqtt/test_config_flow.py @@ -14,6 +14,8 @@ import voluptuous as vol from homeassistant import config_entries from homeassistant.components import mqtt from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.components.hassio.addon_manager import AddonError +from homeassistant.components.hassio.handler import HassioAPIError from homeassistant.components.mqtt.config_flow import PWD_NOT_CHANGED from homeassistant.const import ( CONF_CLIENT_ID, @@ -28,6 +30,15 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry from tests.typing import MqttMockHAClientGenerator, MqttMockPahoClient +ADD_ON_DISCOVERY_INFO = { + "addon": "Mosquitto Mqtt Broker", + "host": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "protocol": "3.1.1", + "ssl": False, +} MOCK_CLIENT_CERT = b"## mock client certificate file ##" MOCK_CLIENT_KEY = b"## mock key file ##" @@ -186,6 +197,29 @@ def mock_process_uploaded_file( yield mock_upload +@pytest.fixture(name="supervisor") +def supervisor_fixture() -> Generator[MagicMock]: + """Mock Supervisor.""" + with patch( + "homeassistant.components.mqtt.config_flow.is_hassio", return_value=True + ) as is_hassio: + yield is_hassio + + +@pytest.fixture(name="addon_setup_time", autouse=True) +def addon_setup_time_fixture() -> Generator[int]: + """Mock add-on setup sleep time.""" + with patch( + "homeassistant.components.mqtt.config_flow.ADDON_SETUP_TIMEOUT", new=0 + ) as addon_setup_time: + yield addon_setup_time + + +@pytest.fixture(autouse=True) +def mock_get_addon_discovery_info(get_addon_discovery_info: AsyncMock) -> None: + """Mock get add-on discovery info.""" + + @pytest.mark.usefixtures("mqtt_client_mock") async def test_user_connection_works( hass: HomeAssistant, @@ -216,6 +250,47 @@ async def test_user_connection_works( assert len(mock_finish_setup.mock_calls) == 1 +@pytest.mark.usefixtures("mqtt_client_mock", "supervisor") +async def test_user_connection_works_with_supervisor( + hass: HomeAssistant, + mock_try_connection: MagicMock, + mock_finish_setup: MagicMock, +) -> None: + """Test we can finish a config flow with a supervised install.""" + mock_try_connection.return_value = True + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "broker"}, + ) + + # Assert a manual setup flow + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"broker": "127.0.0.1"} + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].data == { + "broker": "127.0.0.1", + "port": 1883, + "discovery": True, + } + # Check we tried the connection + 
assert len(mock_try_connection.mock_calls) == 1 + # Check config entry got setup + assert len(mock_finish_setup.mock_calls) == 1 + await hass.async_block_till_done(wait_background_tasks=True) + + @pytest.mark.usefixtures("mqtt_client_mock") async def test_user_v5_connection_works( hass: HomeAssistant, @@ -382,16 +457,8 @@ async def test_hassio_confirm( result = await hass.config_entries.flow.async_init( "mqtt", data=HassioServiceInfo( - config={ - "addon": "Mock Addon", - "host": "mock-broker", - "port": 1883, - "username": "mock-user", - "password": "mock-pass", - "protocol": "3.1.1", # Set by the addon's discovery, ignored by HA - "ssl": False, # Set by the addon's discovery, ignored by HA - }, - name="Mock Addon", + config=ADD_ON_DISCOVERY_INFO.copy(), + name="Mosquitto Mqtt Broker", slug="mosquitto", uuid="1234", ), @@ -399,7 +466,7 @@ async def test_hassio_confirm( ) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "hassio_confirm" - assert result["description_placeholders"] == {"addon": "Mock Addon"} + assert result["description_placeholders"] == {"addon": "Mosquitto Mqtt Broker"} mock_try_connection_success.reset_mock() result = await hass.config_entries.flow.async_configure( @@ -408,7 +475,7 @@ async def test_hassio_confirm( assert result["type"] is FlowResultType.CREATE_ENTRY assert result["result"].data == { - "broker": "mock-broker", + "broker": "core-mosquitto", "port": 1883, "username": "mock-user", "password": "mock-pass", @@ -426,14 +493,12 @@ async def test_hassio_cannot_connect( mock_finish_setup: MagicMock, ) -> None: """Test a config flow is aborted when a connection was not successful.""" - mock_try_connection.return_value = True - result = await hass.config_entries.flow.async_init( "mqtt", data=HassioServiceInfo( config={ "addon": "Mock Addon", - "host": "mock-broker", + "host": "core-mosquitto", "port": 1883, "username": "mock-user", "password": "mock-pass", @@ -463,6 +528,362 @@ async def test_hassio_cannot_connect( assert len(mock_finish_setup.mock_calls) == 0 +@pytest.mark.usefixtures( + "mqtt_client_mock", "supervisor", "addon_info", "addon_running" +) +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +async def test_addon_flow_with_supervisor_addon_running( + hass: HomeAssistant, + mock_try_connection_success: MagicMock, + mock_finish_setup: MagicMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on is already installed, and running. 
+ """ + # show menu + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + # select install via add-on + mock_try_connection_success.reset_mock() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].data == { + "broker": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "discovery": True, + } + # Check we tried the connection + assert len(mock_try_connection_success.mock_calls) + # Check config entry got setup + assert len(mock_finish_setup.mock_calls) == 1 + + +@pytest.mark.usefixtures( + "mqtt_client_mock", "supervisor", "addon_info", "addon_installed", "start_addon" +) +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +async def test_addon_flow_with_supervisor_addon_installed( + hass: HomeAssistant, + mock_try_connection_success: MagicMock, + mock_finish_setup: MagicMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on is installed, but not running. + """ + # show menu + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + # select install via add-on + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + + # add-on installed but not started, so we wait for start-up + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "start_addon" + assert result["step_id"] == "start_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + mock_try_connection_success.reset_mock() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "start_addon"}, + ) + + # add-on is running, so entry can be installed + await hass.async_block_till_done(wait_background_tasks=True) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].data == { + "broker": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "discovery": True, + } + # Check we tried the connection + assert len(mock_try_connection_success.mock_calls) + # Check config entry got setup + assert len(mock_finish_setup.mock_calls) == 1 + + +@pytest.mark.usefixtures( + "mqtt_client_mock", "supervisor", "addon_info", "addon_running" +) +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +async def test_addon_flow_with_supervisor_addon_running_connection_fails( + hass: HomeAssistant, + mock_try_connection: MagicMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on is already installed, and running. 
+ """ + # show menu + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + # select install via add-on but the connection fails and the flow will be aborted. + mock_try_connection.return_value = False + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + await hass.async_block_till_done(wait_background_tasks=True) + assert result["type"] is FlowResultType.ABORT + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "addon_info", + "addon_installed", +) +async def test_addon_not_running_api_error( + hass: HomeAssistant, + start_addon: AsyncMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on start fails on a API error. + """ + start_addon.side_effect = HassioAPIError() + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + # add-on not installed, so we wait for install + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "start_addon" + assert result["step_id"] == "start_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "install_addon"}, + ) + + # add-on start-up failed + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "addon_start_failed" + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "start_addon", + "addon_installed", +) +async def test_addon_discovery_info_error( + hass: HomeAssistant, + addon_info: AsyncMock, + get_addon_discovery_info: AsyncMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on start on a discovery error. + """ + get_addon_discovery_info.side_effect = AddonError + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + # Addon will retry + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "start_addon" + assert result["step_id"] == "start_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "start_addon"}, + ) + + # add-on start-up failed + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "addon_start_failed" + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "start_addon", + "addon_installed", +) +async def test_addon_info_error( + hass: HomeAssistant, + addon_info: AsyncMock, +) -> None: + """Test we perform an auto config flow with a supervised install. 
+ + Case: The Mosquitto add-on info could not be retrieved. + """ + addon_info.side_effect = AddonError() + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + + # add-on info failed + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "addon_info_failed" + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "addon_info", + "addon_not_installed", + "install_addon", + "start_addon", +) +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +async def test_addon_flow_with_supervisor_addon_not_installed( + hass: HomeAssistant, + mock_try_connection_success: MagicMock, + mock_finish_setup: MagicMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on is not yet installed nor running. + """ + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + # add-on not installed, so we wait for install + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "install_addon"}, + ) + + # add-on installed but not started, so we wait for start-up + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "start_addon" + assert result["step_id"] == "start_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + mock_try_connection_success.reset_mock() + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "start_addon"}, + ) + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["result"].data == { + "broker": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + "discovery": True, + } + # Check we tried the connection + assert len(mock_try_connection_success.mock_calls) + # Check config entry got setup + assert len(mock_finish_setup.mock_calls) == 1 + + +@pytest.mark.usefixtures( + "mqtt_client_mock", + "supervisor", + "addon_info", + "addon_not_installed", + "start_addon", +) +async def test_addon_not_installed_failures( + hass: HomeAssistant, + install_addon: AsyncMock, +) -> None: + """Test we perform an auto config flow with a supervised install. + + Case: The Mosquitto add-on install fails. 
+ """ + install_addon.side_effect = HassioAPIError() + + result = await hass.config_entries.flow.async_init( + "mqtt", context={"source": config_entries.SOURCE_USER} + ) + assert result["type"] is FlowResultType.MENU + assert result["menu_options"] == ["addon", "broker"] + assert result["step_id"] == "user" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "addon"}, + ) + # add-on not installed, so we wait for install + assert result["type"] is FlowResultType.SHOW_PROGRESS + assert result["progress_action"] == "install_addon" + assert result["step_id"] == "install_addon" + await hass.async_block_till_done() + await hass.async_block_till_done(wait_background_tasks=True) + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + {"next_step_id": "install_addon"}, + ) + + # add-on install failed + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "addon_install_failed" + + async def test_option_flow( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator, @@ -1130,14 +1551,7 @@ async def test_step_reauth( assert result["context"]["source"] == "reauth" # Show the form - result = await hass.config_entries.flow.async_init( - mqtt.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -1166,6 +1580,108 @@ async def test_step_reauth( await hass.async_block_till_done() +@pytest.mark.parametrize("discovery_info", [{"config": ADD_ON_DISCOVERY_INFO.copy()}]) +@pytest.mark.usefixtures( + "mqtt_client_mock", "mock_reload_after_entry_update", "supervisor", "addon_running" +) +async def test_step_hassio_reauth( + hass: HomeAssistant, mock_try_connection: MagicMock, addon_info: AsyncMock +) -> None: + """Test that the reauth step works in case the Mosquitto broker add-on was re-installed.""" + + # Set up entry data based on the discovery data, but with a stale password + entry_data = { + mqtt.CONF_BROKER: "core-mosquitto", + CONF_PORT: 1883, + CONF_USERNAME: "mock-user", + CONF_PASSWORD: "stale-secret", + } + + addon_info["hostname"] = "core-mosquitto" + + # Prepare the config entry + config_entry = MockConfigEntry(domain=mqtt.DOMAIN, data=entry_data) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + assert config_entry.data.get(CONF_PASSWORD) == "stale-secret" + + # Start reauth flow + mock_try_connection.reset_mock() + mock_try_connection.return_value = True + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 0 + + # Assert the entry is updated automatically + assert config_entry.data.get(CONF_PASSWORD) == "mock-pass" + mock_try_connection.assert_called_once_with( + { + "broker": "core-mosquitto", + "port": 1883, + "username": "mock-user", + "password": "mock-pass", + } + ) + + +@pytest.mark.parametrize( + ("discovery_info", "discovery_info_side_effect", "broker"), + [ + ({"config": ADD_ON_DISCOVERY_INFO.copy()}, AddonError, "core-mosquitto"), + ({"config": ADD_ON_DISCOVERY_INFO.copy()}, None, "broker-not-addon"), + ], +) +@pytest.mark.usefixtures( + "mqtt_client_mock", "mock_reload_after_entry_update", "supervisor", "addon_running" +) +async def test_step_hassio_reauth_no_discovery_info( + hass: HomeAssistant, + 
mock_try_connection: MagicMock, + addon_info: AsyncMock, + broker: str, +) -> None: + """Test hassio reauth flow defaults to manual flow. + + Test that the reauth step defaults to + normal reauth flow if fetching add-on discovery info failed, + or the broker is not the add-on. + """ + + # Set up entry data based on the discovery data, but with a stale password + entry_data = { + mqtt.CONF_BROKER: broker, + CONF_PORT: 1883, + CONF_USERNAME: "mock-user", + CONF_PASSWORD: "wrong-pass", + } + + addon_info["hostname"] = "core-mosquitto" + + # Prepare the config entry + config_entry = MockConfigEntry(domain=mqtt.DOMAIN, data=entry_data) + config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(config_entry.entry_id) + + assert config_entry.data.get(CONF_PASSWORD) == "wrong-pass" + + # Start reauth flow + mock_try_connection.reset_mock() + mock_try_connection.return_value = True + config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "reauth_confirm" + assert result["context"]["source"] == "reauth" + + # Assert the entry is not updated + assert config_entry.data.get(CONF_PASSWORD) == "wrong-pass" + mock_try_connection.assert_not_called() + + async def test_options_user_connection_fails( hass: HomeAssistant, mock_try_connection_time_out: MagicMock ) -> None: diff --git a/tests/components/mqtt/test_device_trigger.py b/tests/components/mqtt/test_device_trigger.py index 10322dd9046..1acfe8dd9f5 100644 --- a/tests/components/mqtt/test_device_trigger.py +++ b/tests/components/mqtt/test_device_trigger.py @@ -18,7 +18,7 @@ from homeassistant.setup import async_setup_component from .test_common import help_test_unload_config_entry from tests.common import async_fire_mqtt_message, async_get_device_automations -from tests.typing import MqttMockHAClient, MqttMockHAClientGenerator, WebSocketGenerator +from tests.typing import MqttMockHAClientGenerator, WebSocketGenerator @pytest.fixture(autouse=True, name="stub_blueprint_populate") @@ -1672,11 +1672,11 @@ async def test_trigger_debug_info( assert debug_info_data["triggers"][0]["discovery_data"]["payload"] == config2 +@pytest.mark.usefixtures("mqtt_mock") async def test_unload_entry( hass: HomeAssistant, service_calls: list[ServiceCall], device_registry: dr.DeviceRegistry, - mqtt_mock: MqttMockHAClient, ) -> None: """Test unloading the MQTT entry.""" @@ -1738,3 +1738,4 @@ async def test_unload_entry( async_fire_mqtt_message(hass, "foobar/triggers/button1", "short_press") await hass.async_block_till_done() assert len(service_calls) == 2 + await hass.async_block_till_done(wait_background_tasks=True) diff --git a/tests/components/mqtt/test_discovery.py b/tests/components/mqtt/test_discovery.py index 58de3c53c52..7f58fc75dae 100644 --- a/tests/components/mqtt/test_discovery.py +++ b/tests/components/mqtt/test_discovery.py @@ -15,6 +15,7 @@ from homeassistant.components.mqtt.abbreviations import ( ABBREVIATIONS, DEVICE_ABBREVIATIONS, ) +from homeassistant.components.mqtt.const import SUPPORTED_COMPONENTS from homeassistant.components.mqtt.discovery import ( MQTT_DISCOVERY_DONE, MQTT_DISCOVERY_NEW, @@ -73,13 +74,10 @@ async def test_subscribing_config_topic( discovery_topic = "homeassistant" await async_start(hass, discovery_topic, entry) - call_args1 = mqtt_mock.async_subscribe.mock_calls[0][1] - assert call_args1[2] == 0 - call_args2 = mqtt_mock.async_subscribe.mock_calls[1][1] - assert 
call_args2[2] == 0 - topics = [call_args1[0], call_args2[0]] - assert discovery_topic + "/+/+/config" in topics - assert discovery_topic + "/+/+/+/config" in topics + topics = [call[1][0] for call in mqtt_mock.async_subscribe.mock_calls] + for component in SUPPORTED_COMPONENTS: + assert f"{discovery_topic}/{component}/+/config" in topics + assert f"{discovery_topic}/{component}/+/+/config" in topics @pytest.mark.parametrize( @@ -198,8 +196,6 @@ async def test_only_valid_components( await hass.async_block_till_done() - assert f"Integration {invalid_component} is not supported" in caplog.text - assert not mock_dispatcher_send.called diff --git a/tests/components/mqtt/test_init.py b/tests/components/mqtt/test_init.py index 51379dc8508..5dab5689518 100644 --- a/tests/components/mqtt/test_init.py +++ b/tests/components/mqtt/test_init.py @@ -89,12 +89,12 @@ async def test_command_template_value(hass: HomeAssistant) -> None: # test rendering value tpl = template.Template("{{ value + 1 }}", hass=hass) - cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) + cmd_tpl = mqtt.MqttCommandTemplate(tpl) assert cmd_tpl.async_render(4321) == "4322" # test variables at rendering tpl = template.Template("{{ some_var }}", hass=hass) - cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) + cmd_tpl = mqtt.MqttCommandTemplate(tpl) assert cmd_tpl.async_render(None, variables=variables) == "beer" @@ -161,8 +161,8 @@ async def test_command_template_variables( async def test_command_template_fails(hass: HomeAssistant) -> None: """Test the exception handling of an MQTT command template.""" - tpl = template.Template("{{ value * 2 }}") - cmd_tpl = mqtt.MqttCommandTemplate(tpl, hass=hass) + tpl = template.Template("{{ value * 2 }}", hass=hass) + cmd_tpl = mqtt.MqttCommandTemplate(tpl) with pytest.raises(MqttCommandTemplateException) as exc: cmd_tpl.async_render(None) assert "unsupported operand type(s) for *: 'NoneType' and 'int'" in str(exc.value) @@ -174,13 +174,13 @@ async def test_value_template_value(hass: HomeAssistant) -> None: variables = {"id": 1234, "some_var": "beer"} # test rendering value - tpl = template.Template("{{ value_json.id }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass) + tpl = template.Template("{{ value_json.id }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl) assert val_tpl.async_render_with_possible_json_value('{"id": 4321}') == "4321" # test variables at rendering - tpl = template.Template("{{ value_json.id }} {{ some_var }} {{ code }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass, config_attributes={"code": 1234}) + tpl = template.Template("{{ value_json.id }} {{ some_var }} {{ code }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl, config_attributes={"code": 1234}) assert ( val_tpl.async_render_with_possible_json_value( '{"id": 4321}', variables=variables @@ -189,8 +189,8 @@ async def test_value_template_value(hass: HomeAssistant) -> None: ) # test with default value if an error occurs due to an invalid template - tpl = template.Template("{{ value_json.id | as_datetime }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass) + tpl = template.Template("{{ value_json.id | as_datetime }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl) assert ( val_tpl.async_render_with_possible_json_value('{"otherid": 4321}', "my default") == "my default" @@ -200,19 +200,19 @@ async def test_value_template_value(hass: HomeAssistant) -> None: entity = Entity() entity.hass = hass entity.entity_id = "select.test" - tpl = template.Template("{{ value_json.id }}") + tpl = 
template.Template("{{ value_json.id }}", hass=hass) val_tpl = mqtt.MqttValueTemplate(tpl, entity=entity) assert val_tpl.async_render_with_possible_json_value('{"id": 4321}') == "4321" # test this object in a template - tpl2 = template.Template("{{ this.entity_id }}") + tpl2 = template.Template("{{ this.entity_id }}", hass=hass) val_tpl2 = mqtt.MqttValueTemplate(tpl2, entity=entity) assert val_tpl2.async_render_with_possible_json_value("bla") == "select.test" with patch( "homeassistant.helpers.template.TemplateStateFromEntityId", MagicMock() ) as template_state_calls: - tpl3 = template.Template("{{ this.entity_id }}") + tpl3 = template.Template("{{ this.entity_id }}", hass=hass) val_tpl3 = mqtt.MqttValueTemplate(tpl3, entity=entity) val_tpl3.async_render_with_possible_json_value("call1") val_tpl3.async_render_with_possible_json_value("call2") @@ -223,8 +223,8 @@ async def test_value_template_fails(hass: HomeAssistant) -> None: """Test the rendering of MQTT value template fails.""" entity = MockEntity(entity_id="sensor.test") entity.hass = hass - tpl = template.Template("{{ value_json.some_var * 2 }}") - val_tpl = mqtt.MqttValueTemplate(tpl, hass=hass, entity=entity) + tpl = template.Template("{{ value_json.some_var * 2 }}", hass=hass) + val_tpl = mqtt.MqttValueTemplate(tpl, entity=entity) with pytest.raises(MqttValueTemplateException) as exc: val_tpl.async_render_with_possible_json_value('{"some_var": null }') assert str(exc.value) == ( @@ -420,6 +420,74 @@ async def test_mqtt_publish_action_call_with_template_payload_renders_template( mqtt_mock.reset_mock() +@pytest.mark.parametrize( + ("attr_payload", "payload", "evaluate_payload", "literal_eval_calls"), + [ + ("b'\\xde\\xad\\xbe\\xef'", b"\xde\xad\xbe\xef", True, 1), + ("b'\\xde\\xad\\xbe\\xef'", "b'\\xde\\xad\\xbe\\xef'", False, 0), + ("DEADBEEF", "DEADBEEF", False, 0), + ( + "b'\\xde", + "b'\\xde", + True, + 1, + ), # Bytes literal is invalid, fall back to string + ], +) +async def test_mqtt_publish_action_call_with_raw_data( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + attr_payload: str, + payload: str | bytes, + evaluate_payload: bool, + literal_eval_calls: int, +) -> None: + """Test the mqtt publish action call raw data. + + When `payload` represents a `bytes` object, it should be published + as raw data if `evaluate_payload` is set. 
+ """ + mqtt_mock = await mqtt_mock_entry() + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: attr_payload, + mqtt.ATTR_EVALUATE_PAYLOAD: evaluate_payload, + }, + blocking=True, + ) + assert mqtt_mock.async_publish.called + assert mqtt_mock.async_publish.call_args[0][1] == payload + + with patch( + "homeassistant.components.mqtt.models.literal_eval" + ) as literal_eval_mock: + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: attr_payload, + }, + blocking=True, + ) + literal_eval_mock.assert_not_called() + + await hass.services.async_call( + mqtt.DOMAIN, + mqtt.SERVICE_PUBLISH, + { + mqtt.ATTR_TOPIC: "test/topic", + mqtt.ATTR_PAYLOAD: attr_payload, + mqtt.ATTR_EVALUATE_PAYLOAD: evaluate_payload, + }, + blocking=True, + ) + assert len(literal_eval_mock.mock_calls) == literal_eval_calls + + # The use of a payload_template in an mqtt publish action call # has been deprecated with HA Core 2024.8.0 and will be removed with HA Core 2025.2.0 async def test_publish_action_call_with_bad_payload_template( @@ -2390,7 +2458,6 @@ async def test_multi_platform_discovery( "PayloadSentinel", "PublishPayloadType", "ReceiveMessage", - "ReceivePayloadType", "async_prepare_subscribe_topics", "async_publish", "async_subscribe", diff --git a/tests/components/mqtt/test_lawn_mower.py b/tests/components/mqtt/test_lawn_mower.py index 4906f6cfda3..101a45787ef 100644 --- a/tests/components/mqtt/test_lawn_mower.py +++ b/tests/components/mqtt/test_lawn_mower.py @@ -103,6 +103,13 @@ async def test_run_lawn_mower_setup_and_state_updates( state = hass.states.get("lawn_mower.test_lawn_mower") assert state.state == "mowing" + async_fire_mqtt_message(hass, "test/lawn_mower_stat", "returning") + + await hass.async_block_till_done() + + state = hass.states.get("lawn_mower.test_lawn_mower") + assert state.state == "returning" + async_fire_mqtt_message(hass, "test/lawn_mower_stat", "docked") await hass.async_block_till_done() @@ -198,6 +205,13 @@ async def test_value_template( state = hass.states.get("lawn_mower.test_lawn_mower") assert state.state == "paused" + async_fire_mqtt_message(hass, "test/lawn_mower_stat", '{"val":"returning"}') + + await hass.async_block_till_done() + + state = hass.states.get("lawn_mower.test_lawn_mower") + assert state.state == "returning" + async_fire_mqtt_message(hass, "test/lawn_mower_stat", '{"val": null}') await hass.async_block_till_done() @@ -702,7 +716,8 @@ async def test_mqtt_payload_not_a_valid_activity_warning( assert ( "Invalid activity for lawn_mower.test_lawn_mower: 'painting' " - "(valid activities: ['error', 'paused', 'mowing', 'docked'])" in caplog.text + "(valid activities: ['error', 'paused', 'mowing', 'docked', 'returning'])" + in caplog.text ) @@ -774,6 +789,7 @@ async def test_reloadable( [ ("activity_state_topic", "paused", None, "paused"), ("activity_state_topic", "docked", None, "docked"), + ("activity_state_topic", "returning", None, "returning"), ("activity_state_topic", "mowing", None, "mowing"), ], ) diff --git a/tests/components/mqtt/test_legacy_vacuum.py b/tests/components/mqtt/test_legacy_vacuum.py deleted file mode 100644 index 9b45b65d2cc..00000000000 --- a/tests/components/mqtt/test_legacy_vacuum.py +++ /dev/null @@ -1,83 +0,0 @@ -"""The tests for the Legacy Mqtt vacuum platform.""" - -# The legacy schema for MQTT vacuum was deprecated with HA Core 2023.8.0 -# and was removed with HA Core 2024.2.0 -# cleanup 
is planned with HA Core 2025.2 - -import json - -import pytest - -from homeassistant.components import mqtt, vacuum -from homeassistant.core import HomeAssistant -from homeassistant.helpers.typing import DiscoveryInfoType - -from tests.common import async_fire_mqtt_message -from tests.typing import MqttMockHAClientGenerator - -DEFAULT_CONFIG = {mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}} - - -@pytest.mark.parametrize( - ("hass_config", "removed"), - [ - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "legacy"}}}, True), - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test"}}}, False), - ({mqtt.DOMAIN: {vacuum.DOMAIN: {"name": "test", "schema": "state"}}}, True), - ], -) -async def test_removed_support_yaml( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, - removed: bool, -) -> None: - """Test that the removed support validation for the legacy schema works.""" - assert await mqtt_mock_entry() - entity = hass.states.get("vacuum.test") - - if removed: - assert entity is None - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" in caplog.text - ) - else: - assert entity is not None - - -@pytest.mark.parametrize( - ("config", "removed"), - [ - ({"name": "test", "schema": "legacy"}, True), - ({"name": "test"}, False), - ({"name": "test", "schema": "state"}, True), - ], -) -async def test_removed_support_discovery( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, - config: DiscoveryInfoType, - removed: bool, -) -> None: - """Test that the removed support validation for the legacy schema works.""" - assert await mqtt_mock_entry() - - config_payload = json.dumps(config) - async_fire_mqtt_message(hass, "homeassistant/vacuum/test/config", config_payload) - await hass.async_block_till_done() - - entity = hass.states.get("vacuum.test") - assert entity is not None - - if removed: - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" in caplog.text - ) - else: - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" not in caplog.text - ) diff --git a/tests/components/mqtt/test_sensor.py b/tests/components/mqtt/test_sensor.py index 4b117aaa4d5..a62c36404ca 100644 --- a/tests/components/mqtt/test_sensor.py +++ b/tests/components/mqtt/test_sensor.py @@ -3,6 +3,7 @@ import copy from datetime import datetime, timedelta import json +import logging from pathlib import Path from typing import Any from unittest.mock import MagicMock, patch @@ -110,6 +111,48 @@ async def test_setting_sensor_value_via_mqtt_message( assert state.attributes.get("unit_of_measurement") == "fav unit" +@pytest.mark.parametrize( + "hass_config", + [ + { + mqtt.DOMAIN: { + sensor.DOMAIN: { + "name": "test", + "state_topic": "test-topic", + "device_class": "enum", + "options": ["red", "green", "blue"], + } + } + }, + ], +) +async def test_setting_enum_sensor_value_via_mqtt_message( + hass: HomeAssistant, + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test the setting of the value via MQTT of an enum type sensor.""" + await mqtt_mock_entry() + + async_fire_mqtt_message(hass, "test-topic", "red") + state = hass.states.get("sensor.test") + assert state.state == "red" + + async_fire_mqtt_message(hass, "test-topic", "green") + state = hass.states.get("sensor.test") + assert state.state == "green" + + with caplog.at_level(logging.WARNING): + 
async_fire_mqtt_message(hass, "test-topic", "yellow") + assert ( + "Ignoring invalid option received on topic 'test-topic', " + "got 'yellow', allowed: red, green, blue" in caplog.text + ) + # Assert the state update was filtered out and ignored + state = hass.states.get("sensor.test") + assert state.state == "green" + + @pytest.mark.parametrize( "hass_config", [ @@ -874,6 +917,61 @@ async def test_invalid_state_class( assert "expected SensorStateClass or one of" in caplog.text +@pytest.mark.parametrize( + ("hass_config", "error_logged"), + [ + ( + { + mqtt.DOMAIN: { + sensor.DOMAIN: { + "name": "test", + "state_topic": "test-topic", + "state_class": "measurement", + "options": ["red", "green", "blue"], + } + } + }, + "Specifying `options` is not allowed together with the `state_class` " + "or `unit_of_measurement` option", + ), + ( + { + mqtt.DOMAIN: { + sensor.DOMAIN: { + "name": "test", + "state_topic": "test-topic", + "device_class": "gas", + "options": ["red", "green", "blue"], + } + } + }, + "The option `options` can only be used together with " + "device class `enum`, got `device_class` 'gas'", + ), + ( + { + mqtt.DOMAIN: { + sensor.DOMAIN: { + "name": "test", + "state_topic": "test-topic", + "options": [], + } + } + }, + "An empty options list is not allowed", + ), + ], +) +async def test_invalid_options_config( + mqtt_mock_entry: MqttMockHAClientGenerator, + caplog: pytest.LogCaptureFixture, + error_logged: str, +) -> None: + """Test state_class, deviceclass with sensor options.""" + assert await mqtt_mock_entry() + assert error_logged in caplog.text + + @pytest.mark.parametrize( "hass_config", [ @@ -891,6 +989,13 @@ async def test_invalid_state_class( "state_topic": "test-topic", "state_class": None, }, + { + "name": "Test 4", + "state_topic": "test-topic", + "state_class": None, + "device_class": "enum", + "options": ["red", "green", "blue"], + }, ] } } diff --git a/tests/components/mqtt/test_vacuum.py b/tests/components/mqtt/test_vacuum.py index 7fc4ff981fd..fbffe062261 100644 --- a/tests/components/mqtt/test_vacuum.py +++ b/tests/components/mqtt/test_vacuum.py @@ -2,7 +2,6 @@ from copy import deepcopy import json -import logging from typing import Any from unittest.mock import patch @@ -102,32 +101,6 @@ CONFIG_ALL_SERVICES = help_custom_config( ) -async def test_warning_schema_option( - hass: HomeAssistant, - mqtt_mock_entry: MqttMockHAClientGenerator, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test the warning on use of deprecated schema option.""" - await mqtt_mock_entry() - # Send discovery message with deprecated schema option - async_fire_mqtt_message( - hass, - f"homeassistant/{vacuum.DOMAIN}/bla/config", - '{"name": "test", "schema": "state", "o": {"name": "Bla2MQTT", "sw": "0.99", "url":"https://example.com/support"}}', - ) - await hass.async_block_till_done() - await hass.async_block_till_done(wait_background_tasks=True) - - state = hass.states.get("vacuum.test") - # We do not fail if the schema option is still in the payload, but we log an error - assert state is not None - with caplog.at_level(logging.WARNING): - assert ( - "The 'schema' option has been removed, " - "please remove it from your configuration" in caplog.text - ) - - @pytest.mark.parametrize("hass_config", [DEFAULT_CONFIG]) async def test_default_supported_features( hass: HomeAssistant, mqtt_mock_entry: MqttMockHAClientGenerator diff --git a/tests/components/mqtt_eventstream/test_init.py b/tests/components/mqtt_eventstream/test_init.py index 82def7ef145..b6c1940b149 100644 --- 
a/tests/components/mqtt_eventstream/test_init.py +++ b/tests/components/mqtt_eventstream/test_init.py @@ -20,7 +20,12 @@ from tests.common import ( from tests.typing import MqttMockHAClient -async def add_eventstream(hass, sub_topic=None, pub_topic=None, ignore_event=None): +async def add_eventstream( + hass: HomeAssistant, + sub_topic: str | None = None, + pub_topic: str | None = None, + ignore_event: list[str] | None = None, +) -> bool: """Add a mqtt_eventstream component.""" config = {} if sub_topic: diff --git a/tests/components/mqtt_json/test_device_tracker.py b/tests/components/mqtt_json/test_device_tracker.py index 36073c11a5d..c372a448d98 100644 --- a/tests/components/mqtt_json/test_device_tracker.py +++ b/tests/components/mqtt_json/test_device_tracker.py @@ -11,11 +11,13 @@ import pytest from homeassistant.components.device_tracker.legacy import ( DOMAIN as DT_DOMAIN, YAML_DEVICES, + AsyncSeeCallback, ) from homeassistant.components.mqtt import DOMAIN as MQTT_DOMAIN from homeassistant.config_entries import ConfigEntryDisabler from homeassistant.const import CONF_PLATFORM from homeassistant.core import HomeAssistant +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component from tests.common import async_fire_mqtt_message @@ -71,9 +73,15 @@ async def test_setup_fails_without_mqtt_being_setup( async def test_ensure_device_tracker_platform_validation(hass: HomeAssistant) -> None: """Test if platform validation was done.""" - async def mock_setup_scanner(hass, config, see, discovery_info=None): + async def mock_setup_scanner( + hass: HomeAssistant, + config: ConfigType, + see: AsyncSeeCallback, + discovery_info: DiscoveryInfoType | None = None, + ) -> bool: """Check that Qos was added by validation.""" assert "qos" in config + return True with patch( "homeassistant.components.mqtt_json.device_tracker.async_setup_scanner", diff --git a/tests/components/mqtt_room/test_sensor.py b/tests/components/mqtt_room/test_sensor.py index e6fe7db3b8e..658dda4b6f8 100644 --- a/tests/components/mqtt_room/test_sensor.py +++ b/tests/components/mqtt_room/test_sensor.py @@ -2,6 +2,7 @@ import datetime import json +from typing import Any from unittest.mock import patch import pytest @@ -40,20 +41,22 @@ FAR_MESSAGE = {"id": DEVICE_ID, "name": NAME, "distance": 10} REALLY_FAR_MESSAGE = {"id": DEVICE_ID, "name": NAME, "distance": 20} -async def send_message(hass, topic, message): +async def send_message( + hass: HomeAssistant, topic: str, message: dict[str, Any] +) -> None: """Test the sending of a message.""" async_fire_mqtt_message(hass, topic, json.dumps(message)) await hass.async_block_till_done() await hass.async_block_till_done() -async def assert_state(hass, room): +async def assert_state(hass: HomeAssistant, room: str) -> None: """Test the assertion of a room state.""" state = hass.states.get(SENSOR_STATE) assert state.state == room -async def assert_distance(hass, distance): +async def assert_distance(hass: HomeAssistant, distance: int) -> None: """Test the assertion of a distance state.""" state = hass.states.get(SENSOR_STATE) assert state.attributes.get("distance") == distance diff --git a/tests/components/myuplink/test_config_flow.py b/tests/components/myuplink/test_config_flow.py index 3ae32575257..c24d26057de 100644 --- a/tests/components/myuplink/test_config_flow.py +++ b/tests/components/myuplink/test_config_flow.py @@ -105,14 +105,7 @@ async def test_flow_reauth( assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - 
result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/nam/snapshots/test_sensor.ambr b/tests/components/nam/snapshots/test_sensor.ambr index 426b2ff2e03..16129c5d7ce 100644 --- a/tests/components/nam/snapshots/test_sensor.ambr +++ b/tests/components/nam/snapshots/test_sensor.ambr @@ -1,51 +1,4 @@ # serializer version: 1 -# name: test_sensor[button.nettigo_air_monitor_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.nettigo_air_monitor_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'nam', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'aa:bb:cc:dd:ee:ff-restart', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[button.nettigo_air_monitor_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'Nettigo Air Monitor Restart', - }), - 'context': , - 'entity_id': 'button.nettigo_air_monitor_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_sensor[sensor.nettigo_air_monitor_bme280_humidity-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/nam/test_config_flow.py b/tests/components/nam/test_config_flow.py index b96eddfd18b..f3465e59fb6 100644 --- a/tests/components/nam/test_config_flow.py +++ b/tests/components/nam/test_config_flow.py @@ -9,7 +9,6 @@ import pytest from homeassistant.components import zeroconf from homeassistant.components.nam.const import DOMAIN from homeassistant.config_entries import ( - SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER, SOURCE_ZEROCONF, @@ -122,6 +121,9 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: data={"host": "10.10.2.3"}, ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -133,15 +135,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: return_value="aa:bb:cc:dd:ee:ff", ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=VALID_AUTH, @@ -160,20 +153,14 @@ async def test_reauth_unsuccessful(hass: HomeAssistant) -> None: data={"host": "10.10.2.3"}, ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.nam.NettigoAirMonitor.async_check_credentials", side_effect=ApiError("API Error"), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - 
context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=VALID_AUTH, diff --git a/tests/components/nanoleaf/test_config_flow.py b/tests/components/nanoleaf/test_config_flow.py index eaa1c60dcd4..97a314b0bf4 100644 --- a/tests/components/nanoleaf/test_config_flow.py +++ b/tests/components/nanoleaf/test_config_flow.py @@ -297,15 +297,7 @@ async def test_reauth(hass: HomeAssistant) -> None: return_value=True, ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "link" diff --git a/tests/components/neato/test_config_flow.py b/tests/components/neato/test_config_flow.py index 1b86c4e9980..c5289927d91 100644 --- a/tests/components/neato/test_config_flow.py +++ b/tests/components/neato/test_config_flow.py @@ -111,16 +111,15 @@ async def test_reauth( hass, NEATO_DOMAIN, ClientCredential(CLIENT_ID, CLIENT_SECRET) ) - MockConfigEntry( + entry = MockConfigEntry( entry_id="my_entry", domain=NEATO_DOMAIN, data={"username": "abcdef", "password": "123456", "vendor": "neato"}, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Should show form - result = await hass.config_entries.flow.async_init( - "neato", context={"source": config_entries.SOURCE_REAUTH} - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/nest/common.py b/tests/components/nest/common.py index 0a553f9c114..9c8de0224f0 100644 --- a/tests/components/nest/common.py +++ b/tests/components/nest/common.py @@ -92,7 +92,7 @@ class FakeSubscriber(GoogleNestSubscriber): stop_calls = 0 - def __init__(self): # pylint: disable=super-init-not-called + def __init__(self) -> None: # pylint: disable=super-init-not-called """Initialize Fake Subscriber.""" self._device_manager = DeviceManager() diff --git a/tests/components/nest/conftest.py b/tests/components/nest/conftest.py index 4b64e80543b..85c64aff379 100644 --- a/tests/components/nest/conftest.py +++ b/tests/components/nest/conftest.py @@ -53,7 +53,7 @@ class FakeAuth(AbstractAuth): from the API. """ - def __init__(self): + def __init__(self) -> None: """Initialize FakeAuth.""" super().__init__(None, None) # Tests can set fake responses here. 
diff --git a/tests/components/nest/test_camera.py b/tests/components/nest/test_camera.py index fd2b5ef0388..6aa25134563 100644 --- a/tests/components/nest/test_camera.py +++ b/tests/components/nest/test_camera.py @@ -165,7 +165,9 @@ async def mock_create_stream(hass: HomeAssistant) -> Generator[AsyncMock]: yield mock_stream -async def async_get_image(hass, width=None, height=None): +async def async_get_image( + hass: HomeAssistant, width: int | None = None, height: int | None = None +) -> bytes: """Get the camera image.""" image = await camera.async_get_image( hass, "camera.my_camera", width=width, height=height @@ -174,7 +176,7 @@ async def async_get_image(hass, width=None, height=None): return image.content -async def fire_alarm(hass, point_in_time): +async def fire_alarm(hass: HomeAssistant, point_in_time: datetime.datetime) -> None: """Fire an alarm and wait for callbacks to run.""" with freeze_time(point_in_time): async_fire_time_changed(hass, point_in_time) diff --git a/tests/components/nest/test_config_flow.py b/tests/components/nest/test_config_flow.py index 5c8f01c8e39..b6e84ce358f 100644 --- a/tests/components/nest/test_config_flow.py +++ b/tests/components/nest/test_config_flow.py @@ -56,7 +56,12 @@ def nest_test_config() -> NestTestConfig: class OAuthFixture: """Simulate the oauth flow used by the config flow.""" - def __init__(self, hass, hass_client_no_auth, aioclient_mock): + def __init__( + self, + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + ) -> None: """Initialize OAuthFixture.""" self.hass = hass self.hass_client = hass_client_no_auth diff --git a/tests/components/nest/test_device_trigger.py b/tests/components/nest/test_device_trigger.py index f818713d382..cf0e1c5ecce 100644 --- a/tests/components/nest/test_device_trigger.py +++ b/tests/components/nest/test_device_trigger.py @@ -59,7 +59,9 @@ def make_camera( } -async def setup_automation(hass, device_id, trigger_type): +async def setup_automation( + hass: HomeAssistant, device_id: str, trigger_type: str +) -> bool: """Set up an automation trigger for testing triggering.""" return await async_setup_component( hass, diff --git a/tests/components/nest/test_event.py b/tests/components/nest/test_event.py new file mode 100644 index 00000000000..f45e6c1c6e6 --- /dev/null +++ b/tests/components/nest/test_event.py @@ -0,0 +1,325 @@ +"""Test for Nest event platform.""" + +import datetime +from typing import Any +from unittest.mock import patch + +from freezegun.api import FrozenDateTimeFactory +from google_nest_sdm.event import EventMessage, EventType +from google_nest_sdm.traits import TraitType +import pytest + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.util.dt import utcnow + +from .common import DEVICE_ID, CreateDevice, FakeSubscriber +from .conftest import PlatformSetup + +EVENT_SESSION_ID = "CjY5Y3VKaTZwR3o4Y19YbTVfMF..." +EVENT_ID = "FWWVQVUdGNUlTU2V4MGV2aTNXV..." +ENCODED_EVENT_ID = "WyJDalk1WTNWS2FUWndSM280WTE5WWJUVmZNRi4uLiIsICJGV1dWUVZVZEdOVWxUVTJWNE1HVjJhVE5YVi4uLiJd" + +EVENT_SESSION_ID2 = "DjY5Y3VKaTZwR3o4Y19YbTVfMF..." +EVENT_ID2 = "GWWVQVUdGNUlTU2V4MGV2aTNXV..." 
+ENCODED_EVENT_ID2 = "WyJEalk1WTNWS2FUWndSM280WTE5WWJUVmZNRi4uLiIsICJHV1dWUVZVZEdOVWxUVTJWNE1HVjJhVE5YVi4uLiJd" + + +@pytest.fixture +def platforms() -> list[Platform]: + """Fixture for platforms to setup.""" + return [Platform.EVENT] + + +@pytest.fixture(autouse=True) +def enable_prefetch(subscriber: FakeSubscriber) -> None: + """Fixture to enable media fetching for tests to exercise.""" + subscriber.cache_policy.fetch = True + with patch("homeassistant.components.nest.EVENT_MEDIA_CACHE_SIZE", new=5): + yield + + +@pytest.fixture +def device_type() -> str: + """Fixture for the type of device under test.""" + return "sdm.devices.types.DOORBELL" + + +@pytest.fixture +async def device_traits() -> dict[str, Any]: + """Fixture to set default device traits used when creating devices.""" + return { + "sdm.devices.traits.Info": { + "customName": "Front", + }, + "sdm.devices.traits.CameraLiveStream": { + "maxVideoResolution": { + "width": 640, + "height": 480, + }, + "videoCodecs": ["H264"], + "audioCodecs": ["AAC"], + }, + } + + +def create_events(events: str) -> EventMessage: + """Create an EventMessage for events.""" + return create_event_messages( + { + event: { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + } + for event in events + } + ) + + +def create_event_messages( + events: dict[str, Any], parameters: dict[str, Any] | None = None +) -> EventMessage: + """Create an EventMessage for events.""" + return EventMessage.create_event( + { + "eventId": "some-event-id", + "timestamp": utcnow().isoformat(timespec="seconds"), + "resourceUpdate": { + "name": DEVICE_ID, + "events": events, + }, + **(parameters if parameters else {}), + }, + auth=None, + ) + + +@pytest.mark.freeze_time("2024-08-24T12:00:00Z") +@pytest.mark.parametrize( + ( + "trait_types", + "entity_id", + "expected_attributes", + "api_event_type", + "expected_event_type", + ), + [ + ( + [TraitType.DOORBELL_CHIME, TraitType.CAMERA_MOTION], + "event.front_chime", + { + "device_class": "doorbell", + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + }, + EventType.DOORBELL_CHIME, + "doorbell_chime", + ), + ( + [TraitType.CAMERA_MOTION, TraitType.CAMERA_PERSON, TraitType.CAMERA_SOUND], + "event.front_motion", + { + "device_class": "motion", + "event_types": ["camera_motion", "camera_person", "camera_sound"], + "friendly_name": "Front Motion", + }, + EventType.CAMERA_MOTION, + "camera_motion", + ), + ( + [TraitType.CAMERA_MOTION, TraitType.CAMERA_PERSON, TraitType.CAMERA_SOUND], + "event.front_motion", + { + "device_class": "motion", + "event_types": ["camera_motion", "camera_person", "camera_sound"], + "friendly_name": "Front Motion", + }, + EventType.CAMERA_PERSON, + "camera_person", + ), + ( + [TraitType.CAMERA_MOTION, TraitType.CAMERA_PERSON, TraitType.CAMERA_SOUND], + "event.front_motion", + { + "device_class": "motion", + "event_types": ["camera_motion", "camera_person", "camera_sound"], + "friendly_name": "Front Motion", + }, + EventType.CAMERA_SOUND, + "camera_sound", + ), + ], +) +async def test_receive_events( + hass: HomeAssistant, + subscriber: FakeSubscriber, + setup_platform: PlatformSetup, + create_device: CreateDevice, + trait_types: list[TraitType], + entity_id: str, + expected_attributes: dict[str, str], + api_event_type: EventType, + expected_event_type: str, +) -> None: + """Test a pubsub message for a camera person event.""" + create_device.create( + raw_traits={ + **{trait_type: {} for trait_type in trait_types}, + api_event_type: {}, + } + ) + await setup_platform() + + state = 
hass.states.get(entity_id) + assert state.state == "unknown" + assert state.attributes == { + **expected_attributes, + "event_type": None, + } + + await subscriber.async_receive_event(create_events([api_event_type])) + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == "2024-08-24T12:00:00.000+00:00" + assert state.attributes == { + **expected_attributes, + "event_type": expected_event_type, + "nest_event_id": ENCODED_EVENT_ID, + } + + +@pytest.mark.parametrize(("trait_type"), [(TraitType.DOORBELL_CHIME)]) +async def test_ignore_unrelated_event( + hass: HomeAssistant, + subscriber: FakeSubscriber, + setup_platform: PlatformSetup, + create_device: CreateDevice, + trait_type: TraitType, +) -> None: + """Test a pubsub message for a camera person event.""" + create_device.create( + raw_traits={ + trait_type: {}, + } + ) + await setup_platform() + + # Device does not have traits matching this event type + await subscriber.async_receive_event(create_events([EventType.CAMERA_MOTION])) + await hass.async_block_till_done() + + state = hass.states.get("event.front_chime") + assert state.state == "unknown" + assert state.attributes == { + "device_class": "doorbell", + "event_type": None, + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + } + + +@pytest.mark.freeze_time("2024-08-24T12:00:00Z") +async def test_event_threads( + hass: HomeAssistant, + subscriber: FakeSubscriber, + setup_platform: PlatformSetup, + create_device: CreateDevice, + freezer: FrozenDateTimeFactory, +) -> None: + """Test multiple events delivered as part of a thread are a single home assistant event.""" + create_device.create( + raw_traits={ + TraitType.DOORBELL_CHIME: {}, + TraitType.CAMERA_CLIP_PREVIEW: {}, + } + ) + await setup_platform() + + state = hass.states.get("event.front_chime") + assert state.state == "unknown" + + # Doorbell event is received + freezer.tick(datetime.timedelta(seconds=2)) + await subscriber.async_receive_event( + create_event_messages( + { + EventType.DOORBELL_CHIME: { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + } + }, + parameters={"eventThreadState": "STARTED"}, + ) + ) + await hass.async_block_till_done() + + state = hass.states.get("event.front_chime") + assert state.state == "2024-08-24T12:00:02.000+00:00" + assert state.attributes == { + "device_class": "doorbell", + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + "event_type": "doorbell_chime", + "nest_event_id": ENCODED_EVENT_ID, + } + + # Media arrives in a second message that ends the thread + freezer.tick(datetime.timedelta(seconds=2)) + await subscriber.async_receive_event( + create_event_messages( + { + EventType.DOORBELL_CHIME: { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + }, + EventType.CAMERA_CLIP_PREVIEW: { + "eventSessionId": EVENT_SESSION_ID, + "previewUrl": "http://example", + }, + }, + parameters={"eventThreadState": "ENDED"}, + ) + ) + await hass.async_block_till_done() + + state = hass.states.get("event.front_chime") + assert ( + state.state == "2024-08-24T12:00:02.000+00:00" + ) # A second event is not received + assert state.attributes == { + "device_class": "doorbell", + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + "event_type": "doorbell_chime", + "nest_event_id": ENCODED_EVENT_ID, + } + + # An additional doorbell press event happens (with an updated session id) + freezer.tick(datetime.timedelta(seconds=2)) + await subscriber.async_receive_event( + create_event_messages( 
+ { + EventType.DOORBELL_CHIME: { + "eventSessionId": EVENT_SESSION_ID2, + "eventId": EVENT_ID2, + }, + EventType.CAMERA_CLIP_PREVIEW: { + "eventSessionId": EVENT_SESSION_ID2, + "previewUrl": "http://example", + }, + }, + parameters={"eventThreadState": "ENDED"}, + ) + ) + await hass.async_block_till_done() + + state = hass.states.get("event.front_chime") + assert state.state == "2024-08-24T12:00:06.000+00:00" # Third event is received + assert state.attributes == { + "device_class": "doorbell", + "event_types": ["doorbell_chime"], + "friendly_name": "Front Chime", + "event_type": "doorbell_chime", + "nest_event_id": ENCODED_EVENT_ID2, + } diff --git a/tests/components/nest/test_events.py b/tests/components/nest/test_events.py index 08cf9f775b7..e746e5f263f 100644 --- a/tests/components/nest/test_events.py +++ b/tests/components/nest/test_events.py @@ -122,28 +122,28 @@ def create_events(events, device_id=DEVICE_ID, timestamp=None): [ ( "sdm.devices.types.DOORBELL", - ["sdm.devices.traits.DoorbellChime"], + ["sdm.devices.traits.DoorbellChime", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.DoorbellChime.Chime", "Doorbell", "doorbell_chime", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraMotion"], + ["sdm.devices.traits.CameraMotion", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.CameraMotion.Motion", "Camera", "camera_motion", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraPerson"], + ["sdm.devices.traits.CameraPerson", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.CameraPerson.Person", "Camera", "camera_person", ), ( "sdm.devices.types.CAMERA", - ["sdm.devices.traits.CameraSound"], + ["sdm.devices.traits.CameraSound", "sdm.devices.traits.CameraEventImage"], "sdm.devices.events.CameraSound.Sound", "Camera", "camera_sound", @@ -186,6 +186,8 @@ async def test_event( "type": expected_type, "timestamp": event_time, } + assert "image" in events[0].data["attachment"] + assert "video" not in events[0].data["attachment"] @pytest.mark.parametrize( @@ -232,6 +234,41 @@ async def test_camera_multiple_event( } +@pytest.mark.parametrize( + "device_traits", + [(["sdm.devices.traits.CameraMotion"])], +) +async def test_media_not_supported( + hass: HomeAssistant, entity_registry: er.EntityRegistry, subscriber, setup_platform +) -> None: + """Test a pubsub message for a camera person event.""" + events = async_capture_events(hass, NEST_EVENT) + await setup_platform() + entry = entity_registry.async_get("camera.front") + assert entry is not None + + event_map = { + "sdm.devices.events.CameraMotion.Motion": { + "eventSessionId": EVENT_SESSION_ID, + "eventId": EVENT_ID, + }, + } + + timestamp = utcnow() + await subscriber.async_receive_event(create_events(event_map, timestamp=timestamp)) + await hass.async_block_till_done() + + event_time = timestamp.replace(microsecond=0) + assert len(events) == 1 + assert event_view(events[0].data) == { + "device_id": entry.device_id, + "type": "camera_motion", + "timestamp": event_time, + } + # Media fetching not supported by this device + assert "attachment" not in events[0].data + + async def test_unknown_event(hass: HomeAssistant, subscriber, setup_platform) -> None: """Test a pubsub message for an unknown event type.""" events = async_capture_events(hass, NEST_EVENT) @@ -344,6 +381,8 @@ async def test_doorbell_event_thread( "type": "camera_motion", "timestamp": timestamp1.replace(microsecond=0), } + assert "image" in events[0].data["attachment"] + assert "video" in events[0].data["attachment"] 
@pytest.mark.parametrize( diff --git a/tests/components/nest/test_media_source.py b/tests/components/nest/test_media_source.py index 3cfa4ee6687..4bc3559e308 100644 --- a/tests/components/nest/test_media_source.py +++ b/tests/components/nest/test_media_source.py @@ -74,7 +74,6 @@ GENERATE_IMAGE_URL_RESPONSE = { } IMAGE_BYTES_FROM_EVENT = b"test url image bytes" IMAGE_AUTHORIZATION_HEADERS = {"Authorization": "Basic g.0.eventToken"} -NEST_EVENT = "nest_event" def frame_image_data(frame_i, total_frames): @@ -1461,3 +1460,111 @@ async def test_camera_image_resize( assert browse.title == "Front: Recent Events" assert not browse.thumbnail assert len(browse.children) == 1 + + +async def test_event_media_attachment( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + device_registry: dr.DeviceRegistry, + subscriber, + auth, + setup_platform, +) -> None: + """Verify that an event media attachment is successfully resolved.""" + await setup_platform() + + assert len(hass.states.async_all()) == 1 + camera = hass.states.get("camera.front") + assert camera is not None + + device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_ID)}) + assert device + assert device.name == DEVICE_NAME + + # Capture any events published + received_events = async_capture_events(hass, NEST_EVENT) + + # Set up fake media, and publish image events + auth.responses = [ + aiohttp.web.json_response(GENERATE_IMAGE_URL_RESPONSE), + aiohttp.web.Response(body=IMAGE_BYTES_FROM_EVENT), + ] + event_timestamp = dt_util.now() + await subscriber.async_receive_event( + create_event( + EVENT_SESSION_ID, + EVENT_ID, + PERSON_EVENT, + timestamp=event_timestamp, + ) + ) + await hass.async_block_till_done() + + assert len(received_events) == 1 + received_event = received_events[0] + attachment = received_event.data.get("attachment") + assert attachment + assert list(attachment.keys()) == ["image"] + assert attachment["image"].startswith("/api/nest/event_media") + assert attachment["image"].endswith("/thumbnail") + + # Download the attachment content and verify it works + client = await hass_client() + response = await client.get(attachment["image"]) + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" + await response.read() + + +@pytest.mark.parametrize("device_traits", [BATTERY_CAMERA_TRAITS]) +async def test_event_clip_media_attachment( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + device_registry: dr.DeviceRegistry, + subscriber, + auth, + setup_platform, + mp4, +) -> None: + """Verify that an event media attachment is successfully resolved.""" + await setup_platform() + + assert len(hass.states.async_all()) == 1 + camera = hass.states.get("camera.front") + assert camera is not None + + device = device_registry.async_get_device(identifiers={(DOMAIN, DEVICE_ID)}) + assert device + assert device.name == DEVICE_NAME + + # Capture any events published + received_events = async_capture_events(hass, NEST_EVENT) + + # Set up fake media, and publish clip events + auth.responses = [ + aiohttp.web.Response(body=mp4.getvalue()), + ] + event_timestamp = dt_util.now() + await subscriber.async_receive_event( + create_event_message( + create_battery_event_data(MOTION_EVENT), + timestamp=event_timestamp, + ) + ) + await hass.async_block_till_done() + + assert len(received_events) == 1 + received_event = received_events[0] + attachment = received_event.data.get("attachment") + assert attachment + assert list(attachment.keys()) == ["image", "video"] + assert 
attachment["image"].startswith("/api/nest/event_media") + assert attachment["image"].endswith("/thumbnail") + assert attachment["video"].startswith("/api/nest/event_media") + assert not attachment["video"].endswith("/thumbnail") + + # Download the attachment content and verify it works + for content_path in attachment.values(): + client = await hass_client() + response = await client.get(content_path) + assert response.status == HTTPStatus.OK, f"Response not matched: {response}" + await response.read() diff --git a/tests/components/netatmo/snapshots/test_sensor.ambr b/tests/components/netatmo/snapshots/test_sensor.ambr index bc2a18d918d..0d13a88cd67 100644 --- a/tests/components/netatmo/snapshots/test_sensor.ambr +++ b/tests/components/netatmo/snapshots/test_sensor.ambr @@ -1159,7 +1159,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.cold_water_power-entry] @@ -1508,7 +1508,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.gas_power-entry] @@ -3257,7 +3257,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.hot_water_power-entry] @@ -3896,7 +3896,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_1_power-entry] @@ -3995,7 +3995,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_2_power-entry] @@ -4094,7 +4094,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_3_power-entry] @@ -4193,7 +4193,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_4_power-entry] @@ -4292,7 +4292,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.line_5_power-entry] @@ -5622,7 +5622,7 @@ 'last_changed': , 'last_reported': , 'last_updated': , - 'state': 'unavailable', + 'state': 'True', }) # --- # name: test_entity[sensor.total_power-entry] diff --git a/tests/components/nextcloud/__init__.py b/tests/components/nextcloud/__init__.py index e2102ed8c25..4bc5a041650 100644 --- a/tests/components/nextcloud/__init__.py +++ b/tests/components/nextcloud/__init__.py @@ -1 +1,38 @@ """Tests for the Nextcloud integration.""" + +from unittest.mock import Mock, patch + +from homeassistant.components.nextcloud.const import DOMAIN +from homeassistant.const import CONF_URL +from homeassistant.core import HomeAssistant + +from .const import MOCKED_ENTRY_ID + +from tests.common import MockConfigEntry + + +def mock_config_entry(config: dict) -> MockConfigEntry: + """Return a mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, title=config[CONF_URL], data=config, entry_id=MOCKED_ENTRY_ID + ) + + +async def init_integration( + hass: HomeAssistant, config: dict, data: dict +) -> MockConfigEntry: + """Set up the nextcloud integration.""" + entry = mock_config_entry(config) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nextcloud.NextcloudMonitor", + ) as mock_nextcloud_monitor, + ): + mock_nextcloud_monitor.update = Mock(return_value=True) + 
mock_nextcloud_monitor.return_value.data = data + assert await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + return entry diff --git a/tests/components/nextcloud/conftest.py b/tests/components/nextcloud/conftest.py index cf3eda55fe1..3234e3773b8 100644 --- a/tests/components/nextcloud/conftest.py +++ b/tests/components/nextcloud/conftest.py @@ -1,19 +1,11 @@ """Fixtrues for the Nextcloud integration tests.""" from collections.abc import Generator -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, patch import pytest -@pytest.fixture -def mock_nextcloud_monitor() -> Mock: - """Mock of NextcloudMonitor.""" - return Mock( - update=Mock(return_value=True), - ) - - @pytest.fixture def mock_setup_entry() -> Generator[AsyncMock]: """Override async_setup_entry.""" diff --git a/tests/components/nextcloud/const.py b/tests/components/nextcloud/const.py new file mode 100644 index 00000000000..2d328292b6f --- /dev/null +++ b/tests/components/nextcloud/const.py @@ -0,0 +1,182 @@ +"""Constants for nextcloud tests.""" + +from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL + +MOCKED_ENTRY_ID = "1234567890abcdef" + +VALID_CONFIG = { + CONF_URL: "https://my.nc_url.local", + CONF_USERNAME: "nc_user", + CONF_PASSWORD: "nc_pass", + CONF_VERIFY_SSL: True, +} + +NC_DATA = { + "nextcloud": { + "system": { + "version": "28.0.4.1", + "theme": "", + "enable_avatars": "yes", + "enable_previews": "yes", + "memcache.local": "\\OC\\Memcache\\APCu", + "memcache.distributed": "none", + "filelocking.enabled": "yes", + "memcache.locking": "none", + "debug": "no", + "freespace": 32769138688, + "cpuload": [2.06640625, 1.58447265625, 1.45263671875], + "mem_total": 30728192, + "mem_free": 6753280, + "swap_total": 10484736, + "swap_free": 10484736, + "apps": { + "num_installed": 41, + "num_updates_available": 0, + "app_updates": [], + }, + "update": {"lastupdatedat": 1713048517, "available": False}, + }, + "storage": { + "num_users": 2, + "num_files": 6783, + "num_storages": 4, + "num_storages_local": 1, + "num_storages_home": 2, + "num_storages_other": 1, + }, + "shares": { + "num_shares": 2, + "num_shares_user": 0, + "num_shares_groups": 0, + "num_shares_link": 2, + "num_shares_mail": 0, + "num_shares_room": 0, + "num_shares_link_no_password": 2, + "num_fed_shares_sent": 0, + "num_fed_shares_received": 1, + "permissions_3_17": 1, + "permissions_3_31": 1, + }, + }, + "server": { + "webserver": "Apache/2.4.57 (Debian)", + "php": { + "version": "8.2.18", + "memory_limit": 536870912, + "max_execution_time": 3600, + "upload_max_filesize": 536870912, + "opcache_revalidate_freq": 60, + "opcache": { + "opcache_enabled": True, + "cache_full": False, + "restart_pending": False, + "restart_in_progress": False, + "memory_usage": { + "used_memory": 72027112, + "free_memory": 62190616, + "wasted_memory": 0, + "current_wasted_percentage": 0, + }, + "interned_strings_usage": { + "buffer_size": 33554432, + "used_memory": 12630360, + "free_memory": 20924072, + "number_of_strings": 69242, + }, + "opcache_statistics": { + "num_cached_scripts": 1406, + "num_cached_keys": 2654, + "max_cached_keys": 16229, + "hits": 9739971, + "start_time": 1722222008, + "last_restart_time": 0, + "oom_restarts": 0, + "hash_restarts": 0, + "manual_restarts": 0, + "misses": 1406, + "blacklist_misses": 0, + "blacklist_miss_ratio": 0, + "opcache_hit_rate": 99.9855667222406, + }, + "jit": { + "enabled": True, + "on": True, + "kind": 5, + "opt_level": 5, + 
"opt_flags": 6, + "buffer_size": 134217712, + "buffer_free": 133190688, + }, + }, + "apcu": { + "cache": { + "num_slots": 4099, + "ttl": 0, + "num_hits": 590911, + "num_misses": 55250, + "num_inserts": 55421, + "num_entries": 102, + "expunges": 0, + "start_time": 1722222008, + "mem_size": 175296, + "memory_type": "mmap", + }, + "sma": {"num_seg": 1, "seg_size": 33554312, "avail_mem": 33342368}, + }, + "extensions": [ + "Core", + "date", + "libxml", + "openssl", + "pcre", + "sqlite3", + "zlib", + "ctype", + "curl", + "dom", + "fileinfo", + "filter", + "hash", + "iconv", + "json", + "mbstring", + "SPL", + "session", + "PDO", + "pdo_sqlite", + "standard", + "posix", + "random", + "Reflection", + "Phar", + "SimpleXML", + "tokenizer", + "xml", + "xmlreader", + "xmlwriter", + "mysqlnd", + "apache2handler", + "apcu", + "bcmath", + "exif", + "ftp", + "gd", + "gmp", + "imagick", + "intl", + "ldap", + "memcached", + "pcntl", + "pdo_mysql", + "pdo_pgsql", + "redis", + "sodium", + "sysvsem", + "zip", + "Zend OPcache", + ], + }, + "database": {"type": "sqlite3", "version": "3.40.1", "size": "4784128"}, + }, + "activeUsers": {"last5minutes": 0, "last1hour": 0, "last24hours": 0}, +} diff --git a/tests/components/nextcloud/snapshots/test_binary_sensor.ambr b/tests/components/nextcloud/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..1831419af52 --- /dev/null +++ b/tests/components/nextcloud/snapshots/test_binary_sensor.ambr @@ -0,0 +1,277 @@ +# serializer version: 1 +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_avatars_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_avatars_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Avatars enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_enable_avatars', + 'unique_id': '1234567890abcdef#system_enable_avatars', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_avatars_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Avatars enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_avatars_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_debug_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_debug_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Debug enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_debug', + 'unique_id': '1234567890abcdef#system_debug', + 'unit_of_measurement': None, 
+ }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_debug_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Debug enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_debug_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_filelocking_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_filelocking_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Filelocking enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_filelocking_enabled', + 'unique_id': '1234567890abcdef#system_filelocking.enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_filelocking_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Filelocking enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_filelocking_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_active-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_active', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT active', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_on', + 'unique_id': '1234567890abcdef#jit_on', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_active-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT active', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_active', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_enabled', + 'unique_id': 
'1234567890abcdef#jit_enabled', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_jit_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_jit_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_previews_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.my_nc_url_local_previews_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Previews enabled', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_enable_previews', + 'unique_id': '1234567890abcdef#system_enable_previews', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[binary_sensor.my_nc_url_local_previews_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Previews enabled', + }), + 'context': , + 'entity_id': 'binary_sensor.my_nc_url_local_previews_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nextcloud/snapshots/test_config_flow.ambr b/tests/components/nextcloud/snapshots/test_config_flow.ambr index 3334478ba24..e87db0a25c0 100644 --- a/tests/components/nextcloud/snapshots/test_config_flow.ambr +++ b/tests/components/nextcloud/snapshots/test_config_flow.ambr @@ -1,16 +1,8 @@ # serializer version: 1 -# name: test_import - dict({ - 'password': 'nc_pass', - 'url': 'nc_url', - 'username': 'nc_user', - 'verify_ssl': True, - }) -# --- # name: test_reauth dict({ 'password': 'other_password', - 'url': 'nc_url', + 'url': 'https://my.nc_url.local', 'username': 'other_user', 'verify_ssl': True, }) @@ -18,7 +10,7 @@ # name: test_user_create_entry dict({ 'password': 'nc_pass', - 'url': 'nc_url', + 'url': 'https://my.nc_url.local', 'username': 'nc_user', 'verify_ssl': True, }) diff --git a/tests/components/nextcloud/snapshots/test_sensor.ambr b/tests/components/nextcloud/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..c49ba3496da --- /dev/null +++ b/tests/components/nextcloud/snapshots/test_sensor.ambr @@ -0,0 +1,3973 @@ +# serializer version: 1 +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_5_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_5_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of active users last 5 minutes', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'nextcloud_activeusers_last5minutes', + 'unique_id': '1234567890abcdef#activeUsers_last5minutes', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_5_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of active users last 5 minutes', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_5_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_day-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_day', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of active users last day', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_activeusers_last24hours', + 'unique_id': '1234567890abcdef#activeUsers_last24hours', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_day-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of active users last day', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_day', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_hour-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_hour', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of active users last hour', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_activeusers_last1hour', + 'unique_id': '1234567890abcdef#activeUsers_last1hour', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_active_users_last_hour-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of active users last hour', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_active_users_last_hour', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_files-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': 
None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_files', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of files', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_files', + 'unique_id': '1234567890abcdef#storage_num_files', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_files-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of files', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_files', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6783', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_group_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_group_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of group shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_groups', + 'unique_id': '1234567890abcdef#shares_num_shares_groups', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_group_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of group shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_group_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_link_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_link_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of link shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_link', + 'unique_id': '1234567890abcdef#shares_num_shares_link', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_link_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of link shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_link_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_amount_of_local_storages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_local_storages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of local storages', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages_local', + 'unique_id': '1234567890abcdef#storage_num_storages_local', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_local_storages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of local storages', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_local_storages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_mail_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_mail_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of mail shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_mail', + 'unique_id': '1234567890abcdef#shares_num_shares_mail', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_mail_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of mail shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_mail_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_other_storages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_other_storages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of other storages', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages_other', + 'unique_id': '1234567890abcdef#storage_num_storages_other', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_amount_of_other_storages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of other storages', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_other_storages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_passwordless_link_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_passwordless_link_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of passwordless link shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_link_no_password', + 'unique_id': '1234567890abcdef#shares_num_shares_link_no_password', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_passwordless_link_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of passwordless link shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_passwordless_link_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_room_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_room_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of room shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_room', + 'unique_id': '1234567890abcdef#shares_num_shares_room', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_room_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of room shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_room_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 
'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares', + 'unique_id': '1234567890abcdef#shares_num_shares', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_received-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_received', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of shares received', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_fed_shares_received', + 'unique_id': '1234567890abcdef#shares_num_fed_shares_received', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_received-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of shares received', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_received', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_sent-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_sent', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of shares sent', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_fed_shares_sent', + 'unique_id': '1234567890abcdef#shares_num_fed_shares_sent', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_shares_sent-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of shares sent', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_shares_sent', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of storages', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages', + 'unique_id': '1234567890abcdef#storage_num_storages', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of storages', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages_at_home-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages_at_home', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of storages at home', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_storages_home', + 'unique_id': '1234567890abcdef#storage_num_storages_home', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_storages_at_home-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of storages at home', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_storages_at_home', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_amount_of_user', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of user', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_storage_num_users', + 'unique_id': '1234567890abcdef#storage_num_users', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of user', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_user', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2', + }) 
+# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user_shares-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_user_shares', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Amount of user shares', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_shares_num_shares_user', + 'unique_id': '1234567890abcdef#shares_num_shares_user', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_amount_of_user_shares-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Amount of user shares', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_amount_of_user_shares', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_apps_installed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_apps_installed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Apps installed', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_apps_num_installed', + 'unique_id': '1234567890abcdef#system_apps_num_installed', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_apps_installed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Apps installed', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_apps_installed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '41', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_expunges-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_expunges', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache expunges', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_expunges', + 'unique_id': '1234567890abcdef#cache_expunges', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_expunges-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache expunges', 
+ 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_expunges', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_memory_type', + 'unique_id': '1234567890abcdef#cache_memory_type', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache memory', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'mmap', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cache memory size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_mem_size', + 'unique_id': '1234567890abcdef#cache_mem_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_memory_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Cache memory size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_memory_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.175296', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_entires-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_entires', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of entires', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_entries', + 'unique_id': 
'1234567890abcdef#cache_num_entries', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_entires-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of entires', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_entires', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '102', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_hits-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_hits', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of hits', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_hits', + 'unique_id': '1234567890abcdef#cache_num_hits', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_hits-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of hits', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_hits', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '590911', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_inserts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_inserts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of inserts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_inserts', + 'unique_id': '1234567890abcdef#cache_num_inserts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_inserts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of inserts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_inserts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55421', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_misses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_misses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of misses', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_misses', + 'unique_id': '1234567890abcdef#cache_num_misses', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_misses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of misses', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_misses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '55250', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_slots-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_slots', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache number of slots', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_num_slots', + 'unique_id': '1234567890abcdef#cache_num_slots', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_number_of_slots-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache number of slots', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_number_of_slots', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4099', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_start_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_cache_start_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cache start time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_start_time', + 'unique_id': '1234567890abcdef#cache_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_start_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my.nc_url.local Cache start time', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_start_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-07-29T03:00:08+00:00', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_ttl-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 
'sensor.my_nc_url_local_cache_ttl', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Cache ttl', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_cache_ttl', + 'unique_id': '1234567890abcdef#cache_ttl', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cache_ttl-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Cache ttl', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cache_ttl', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_15_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_15_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU Load last 15 minutes', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_cpuload_15', + 'unique_id': '1234567890abcdef#system_cpuload_15', + 'unit_of_measurement': 'load', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_15_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local CPU Load last 15 minutes', + 'unit_of_measurement': 'load', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_15_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.45263671875', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_1_minute-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_1_minute', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU Load last 1 minute', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_cpuload_1', + 'unique_id': '1234567890abcdef#system_cpuload_1', + 'unit_of_measurement': 'load', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_1_minute-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local CPU Load last 1 minute', + 'unit_of_measurement': 'load', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_1_minute', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.06640625', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_5_minutes-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_5_minutes', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU Load last 5 minutes', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_cpuload_5', + 'unique_id': '1234567890abcdef#system_cpuload_5', + 'unit_of_measurement': 'load', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_cpu_load_last_5_minutes-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local CPU Load last 5 minutes', + 'unit_of_measurement': 'load', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_cpu_load_last_5_minutes', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.58447265625', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_database_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Database size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_database_size', + 'unique_id': '1234567890abcdef#database_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Database size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_database_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '4.784128', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_type-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_database_type', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Database type', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_database_type', + 'unique_id': '1234567890abcdef#database_type', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_database_type-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Database type', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_database_type', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'sqlite3', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_database_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Database version', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_database_version', + 'unique_id': '1234567890abcdef#database_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_database_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Database version', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_database_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3.40.1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_free_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_mem_free', + 'unique_id': '1234567890abcdef#system_mem_free', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Free memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_free_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6.75328', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_space-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_free_space', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Free space', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_freespace', + 'unique_id': '1234567890abcdef#system_freespace', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_space-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Free space', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_free_space', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.769138688', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_swap_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_free_swap_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Free swap memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_swap_free', + 'unique_id': '1234567890abcdef#system_swap_free', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_free_swap_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Free swap memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_free_swap_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.484736', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_buffer_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_buffer_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Interned buffer size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_buffer_size', + 'unique_id': '1234567890abcdef#interned_strings_usage_buffer_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_buffer_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Interned buffer size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_buffer_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33.554432', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_interned_free_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_free_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Interned free memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_free_memory', + 'unique_id': '1234567890abcdef#interned_strings_usage_free_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_free_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Interned free memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_free_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.924072', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_number_of_strings-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_number_of_strings', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Interned number of strings', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_number_of_strings', + 'unique_id': '1234567890abcdef#interned_strings_usage_number_of_strings', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_number_of_strings-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Interned number of strings', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_number_of_strings', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '69242', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_used_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_interned_used_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Interned used memory', + 'platform': 'nextcloud', + 
'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_interned_strings_usage_used_memory', + 'unique_id': '1234567890abcdef#interned_strings_usage_used_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_interned_used_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Interned used memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_interned_used_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '12.63036', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_free-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_free', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'JIT buffer free', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_buffer_free', + 'unique_id': '1234567890abcdef#jit_buffer_free', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_free-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local JIT buffer free', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_free', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '133.190688', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'JIT buffer size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_buffer_size', + 'unique_id': '1234567890abcdef#jit_buffer_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_buffer_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local JIT buffer size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_buffer_size', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '134.217712', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_kind-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 
'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_kind', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT kind', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_kind', + 'unique_id': '1234567890abcdef#jit_kind', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_kind-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT kind', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_kind', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_flags-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_flags', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT opt flags', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_opt_flags', + 'unique_id': '1234567890abcdef#jit_opt_flags', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_flags-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT opt flags', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_flags', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_level-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_level', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'JIT opt level', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_jit_opt_level', + 'unique_id': '1234567890abcdef#jit_opt_level', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_jit_opt_level-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local JIT opt level', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_jit_opt_level', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_miss_ratio-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 
'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_miss_ratio', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache blacklist miss ratio', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_blacklist_miss_ratio', + 'unique_id': '1234567890abcdef#opcache_statistics_blacklist_miss_ratio', + 'unit_of_measurement': '%', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_miss_ratio-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache blacklist miss ratio', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_miss_ratio', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_misses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_misses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache blacklist misses', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_blacklist_misses', + 'unique_id': '1234567890abcdef#opcache_statistics_blacklist_misses', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_blacklist_misses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache blacklist misses', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_blacklist_misses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_keys-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_keys', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache cached keys', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_num_cached_keys', + 'unique_id': '1234567890abcdef#opcache_statistics_num_cached_keys', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_keys-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache cached keys', + 
'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_keys', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2654', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_scripts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_scripts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache cached scripts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_num_cached_scripts', + 'unique_id': '1234567890abcdef#opcache_statistics_num_cached_scripts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_cached_scripts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache cached scripts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_cached_scripts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1406', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_current_wasted_percentage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_current_wasted_percentage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache current wasted percentage', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_current_wasted_percentage', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_current_wasted_percentage', + 'unit_of_measurement': '%', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_current_wasted_percentage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache current wasted percentage', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_current_wasted_percentage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_free_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_free_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': 
dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache free memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_free_memory', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_free_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_free_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Opcache free memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_free_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '62.190616', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hash_restarts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hash_restarts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache hash restarts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_hash_restarts', + 'unique_id': '1234567890abcdef#opcache_statistics_hash_restarts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hash_restarts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache hash restarts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hash_restarts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hit_rate-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hit_rate', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache hit rate', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_opcache_hit_rate', + 'unique_id': '1234567890abcdef#opcache_statistics_opcache_hit_rate', + 'unit_of_measurement': '%', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hit_rate-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache hit rate', + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hit_rate', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '99.9855667222406', + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_opcache_hits-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hits', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache hits', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_hits', + 'unique_id': '1234567890abcdef#opcache_statistics_hits', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_hits-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache hits', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_hits', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '9739971', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_last_restart_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_last_restart_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache last restart time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_last_restart_time', + 'unique_id': '1234567890abcdef#opcache_statistics_last_restart_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_last_restart_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my.nc_url.local Opcache last restart time', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_last_restart_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1970-01-01T00:00:00+00:00', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_manual_restarts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_manual_restarts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache manual restarts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_manual_restarts', + 'unique_id': '1234567890abcdef#opcache_statistics_manual_restarts', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_opcache_manual_restarts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache manual restarts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_manual_restarts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_max_cached_keys-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_max_cached_keys', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache max cached keys', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_max_cached_keys', + 'unique_id': '1234567890abcdef#opcache_statistics_max_cached_keys', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_max_cached_keys-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache max cached keys', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_max_cached_keys', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '16229', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_misses-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_misses', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Opcache misses', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_misses', + 'unique_id': '1234567890abcdef#opcache_statistics_misses', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_misses-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache misses', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_misses', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1406', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_out_of_memory_restarts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_out_of_memory_restarts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 
None, + 'original_name': 'Opcache out of memory restarts', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_oom_restarts', + 'unique_id': '1234567890abcdef#opcache_statistics_oom_restarts', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_out_of_memory_restarts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Opcache out of memory restarts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_out_of_memory_restarts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_start_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_start_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache start time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_opcache_statistics_start_time', + 'unique_id': '1234567890abcdef#opcache_statistics_start_time', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_start_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'my.nc_url.local Opcache start time', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_start_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2024-07-29T03:00:08+00:00', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_used_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_used_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache used memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_used_memory', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_used_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_used_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Opcache used memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_used_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '72.027112', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_wasted_memory-entry] + 
EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_opcache_wasted_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Opcache wasted memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_opcache_memory_usage_wasted_memory', + 'unique_id': '1234567890abcdef#server_php_opcache_memory_usage_wasted_memory', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_opcache_wasted_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Opcache wasted memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_opcache_wasted_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_max_execution_time-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_max_execution_time', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PHP max execution time', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_max_execution_time', + 'unique_id': '1234567890abcdef#server_php_max_execution_time', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_max_execution_time-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'my.nc_url.local PHP max execution time', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_max_execution_time', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '3600', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_memory_limit-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_memory_limit', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PHP memory limit', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_memory_limit', + 
'unique_id': '1234567890abcdef#server_php_memory_limit', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_memory_limit-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local PHP memory limit', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_memory_limit', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '536.870912', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_upload_maximum_filesize-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_upload_maximum_filesize', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'PHP upload maximum filesize', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_upload_max_filesize', + 'unique_id': '1234567890abcdef#server_php_upload_max_filesize', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_upload_maximum_filesize-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local PHP upload maximum filesize', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_upload_maximum_filesize', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '536.870912', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_php_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'PHP version', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_php_version', + 'unique_id': '1234567890abcdef#server_php_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_php_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local PHP version', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_php_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8.2.18', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_available_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_sma_available_memory', + 'has_entity_name': True, 
+ 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SMA available memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_sma_avail_mem', + 'unique_id': '1234567890abcdef#sma_avail_mem', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_available_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local SMA available memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_sma_available_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '33.342368', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_number_of_segments-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_sma_number_of_segments', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'SMA number of segments', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_sma_num_seg', + 'unique_id': '1234567890abcdef#sma_num_seg', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_number_of_segments-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local SMA number of segments', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_sma_number_of_segments', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_segment_size-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_sma_segment_size', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'SMA segment size', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_sma_seg_size', + 'unique_id': '1234567890abcdef#sma_seg_size', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_sma_segment_size-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local SMA segment size', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_sma_segment_size', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': '33.554312', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_distributed-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_distributed', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System memcache distributed', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_memcache_distributed', + 'unique_id': '1234567890abcdef#system_memcache.distributed', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_distributed-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System memcache distributed', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_distributed', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_local-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_local', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System memcache local', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_memcache_local', + 'unique_id': '1234567890abcdef#system_memcache.local', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_local-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System memcache local', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_local', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '\\OC\\Memcache\\APCu', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_memcache_locking-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_locking', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System memcache locking', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_memcache_locking', + 'unique_id': '1234567890abcdef#system_memcache.locking', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_async_setup_entry[sensor.my_nc_url_local_system_memcache_locking-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System memcache locking', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_memcache_locking', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'none', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_theme-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_system_theme', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System theme', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_theme', + 'unique_id': '1234567890abcdef#system_theme', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_theme-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System theme', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_theme', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_version-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_system_version', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'System version', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_version', + 'unique_id': '1234567890abcdef#system_version', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_system_version-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local System version', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_system_version', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.0.4.1', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_total_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 
'nextcloud_system_mem_total', + 'unique_id': '1234567890abcdef#system_mem_total', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Total memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_total_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30.728192', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_swap_memory-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_total_swap_memory', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 2, + }), + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Total swap memory', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_swap_total', + 'unique_id': '1234567890abcdef#system_swap_total', + 'unit_of_measurement': , + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_total_swap_memory-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'my.nc_url.local Total swap memory', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_total_swap_memory', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.484736', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_updates_available-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.my_nc_url_local_updates_available', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Updates available', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_system_apps_num_updates_available', + 'unique_id': '1234567890abcdef#system_apps_num_updates_available', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_updates_available-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Updates available', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_updates_available', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_webserver-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.my_nc_url_local_webserver', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Webserver', + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'nextcloud_server_webserver', + 'unique_id': '1234567890abcdef#server_webserver', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[sensor.my_nc_url_local_webserver-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'my.nc_url.local Webserver', + }), + 'context': , + 'entity_id': 'sensor.my_nc_url_local_webserver', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Apache/2.4.57 (Debian)', + }) +# --- diff --git a/tests/components/nextcloud/snapshots/test_update.ambr b/tests/components/nextcloud/snapshots/test_update.ambr new file mode 100644 index 00000000000..1ee6264c204 --- /dev/null +++ b/tests/components/nextcloud/snapshots/test_update.ambr @@ -0,0 +1,57 @@ +# serializer version: 1 +# name: test_async_setup_entry[update.my_nc_url_local_none-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'update', + 'entity_category': , + 'entity_id': 'update.my_nc_url_local_none', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'nextcloud', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '1234567890abcdef#update', + 'unit_of_measurement': None, + }) +# --- +# name: test_async_setup_entry[update.my_nc_url_local_none-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'auto_update': False, + 'entity_picture': 'https://brands.home-assistant.io/_/nextcloud/icon.png', + 'friendly_name': 'my.nc_url.local None', + 'in_progress': False, + 'installed_version': '28.0.4.1', + 'latest_version': '28.0.4.1', + 'release_summary': None, + 'release_url': 'https://nextcloud.com/changelog/#28-0-4', + 'skipped_version': None, + 'supported_features': , + 'title': None, + }), + 'context': , + 'entity_id': 'update.my_nc_url_local_none', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- diff --git a/tests/components/nextcloud/test_binary_sensor.py b/tests/components/nextcloud/test_binary_sensor.py new file mode 100644 index 00000000000..ff121c53ec3 --- /dev/null +++ b/tests/components/nextcloud/test_binary_sensor.py @@ -0,0 +1,33 @@ +"""Tests for the Nextcloud binary sensors.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import init_integration +from .const import NC_DATA, VALID_CONFIG + +from tests.common import snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a successful setup entry.""" + with patch( + "homeassistant.components.nextcloud.PLATFORMS", [Platform.BINARY_SENSOR] + ): + entry = await init_integration(hass, VALID_CONFIG, NC_DATA) + + states = hass.states.async_all() + assert len(states) == 6 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/nextcloud/test_config_flow.py b/tests/components/nextcloud/test_config_flow.py index 9a881197cf9..16b6bf3bc04 100644 --- a/tests/components/nextcloud/test_config_flow.py +++ b/tests/components/nextcloud/test_config_flow.py @@ -1,6 +1,6 @@ """Tests for the Nextcloud config flow.""" -from unittest.mock import Mock, patch +from unittest.mock import patch from nextcloudmonitor import ( NextcloudMonitorAuthorizationError, @@ -10,26 +10,21 @@ from nextcloudmonitor import ( import pytest from syrupy.assertion import SnapshotAssertion -from homeassistant.components.nextcloud import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_URL, CONF_USERNAME, CONF_VERIFY_SSL +from homeassistant.components.nextcloud.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from .const import VALID_CONFIG + from tests.common import MockConfigEntry pytestmark = pytest.mark.usefixtures("mock_setup_entry") -VALID_CONFIG = { - CONF_URL: "nc_url", - CONF_USERNAME: "nc_user", - CONF_PASSWORD: "nc_pass", - CONF_VERIFY_SSL: True, -} - async def test_user_create_entry( - hass: HomeAssistant, mock_nextcloud_monitor: Mock, snapshot: SnapshotAssertion + hass: HomeAssistant, snapshot: SnapshotAssertion ) -> None: """Test that the user step works.""" # start user flow @@ -85,7 +80,7 @@ async def test_user_create_entry( # test success with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=mock_nextcloud_monitor, + return_value=True, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -94,17 +89,15 @@ async def test_user_create_entry( await hass.async_block_till_done() assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "nc_url" + assert result["title"] == "https://my.nc_url.local" assert result["data"] == snapshot -async def test_user_already_configured( - hass: HomeAssistant, mock_nextcloud_monitor: Mock -) -> None: +async def test_user_already_configured(hass: HomeAssistant) -> None: """Test that errors are shown when duplicates are added.""" entry = MockConfigEntry( domain=DOMAIN, - title="nc_url", + title="https://my.nc_url.local", unique_id="nc_url", data=VALID_CONFIG, ) @@ -119,7 +112,7 @@ async def test_user_already_configured( with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=mock_nextcloud_monitor, + return_value=True, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -131,24 +124,18 @@ async def test_user_already_configured( assert result["reason"] == "already_configured" -async def test_reauth( - hass: 
HomeAssistant, mock_nextcloud_monitor: Mock, snapshot: SnapshotAssertion -) -> None: +async def test_reauth(hass: HomeAssistant, snapshot: SnapshotAssertion) -> None: """Test that the re-auth flow works.""" entry = MockConfigEntry( domain=DOMAIN, - title="nc_url", + title="https://my.nc_url.local", unique_id="nc_url", data=VALID_CONFIG, ) entry.add_to_hass(hass) # start reauth flow - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -206,7 +193,7 @@ async def test_reauth( # test success with patch( "homeassistant.components.nextcloud.config_flow.NextcloudMonitor", - return_value=mock_nextcloud_monitor, + return_value=True, ): result = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/nextcloud/test_coordinator.py b/tests/components/nextcloud/test_coordinator.py new file mode 100644 index 00000000000..91f7e7967a3 --- /dev/null +++ b/tests/components/nextcloud/test_coordinator.py @@ -0,0 +1,69 @@ +"""Tests for the Nextcloud coordinator.""" + +from unittest.mock import Mock, patch + +from freezegun.api import FrozenDateTimeFactory +from nextcloudmonitor import ( + NextcloudMonitor, + NextcloudMonitorAuthorizationError, + NextcloudMonitorConnectionError, + NextcloudMonitorError, + NextcloudMonitorRequestError, +) +import pytest + +from homeassistant.components.nextcloud.const import DEFAULT_SCAN_INTERVAL +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant + +from . import mock_config_entry +from .const import NC_DATA, VALID_CONFIG + +from tests.common import async_fire_time_changed + + +@pytest.mark.parametrize( + ("error"), + [ + (NextcloudMonitorAuthorizationError), + (NextcloudMonitorConnectionError), + (NextcloudMonitorRequestError), + ], +) +async def test_data_update( + hass: HomeAssistant, freezer: FrozenDateTimeFactory, error: NextcloudMonitorError +) -> None: + """Test coordinator data updates.""" + entry = mock_config_entry(VALID_CONFIG) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nextcloud.NextcloudMonitor", spec=NextcloudMonitor + ) as mock_nextcloud_monitor, + ): + mock_nextcloud_monitor.return_value.update = Mock( + return_value=True, + side_effect=[None, error, None], + ) + mock_nextcloud_monitor.return_value.data = NC_DATA + assert await hass.config_entries.async_setup(entry.entry_id) + + # Test successful setup and first data fetch + await hass.async_block_till_done(wait_background_tasks=True) + states = hass.states.async_all() + assert all(state.state != STATE_UNAVAILABLE for state in states) + + # Test states become unavailable on error + freezer.tick(DEFAULT_SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + states = hass.states.async_all() + assert all(state.state == STATE_UNAVAILABLE for state in states) + + # Test successful data fetch + freezer.tick(DEFAULT_SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done(wait_background_tasks=True) + states = hass.states.async_all() + assert all(state.state != STATE_UNAVAILABLE for state in states) diff --git a/tests/components/nextcloud/test_init.py b/tests/components/nextcloud/test_init.py new file mode 100644 index 00000000000..70c8f545c6b --- /dev/null +++ b/tests/components/nextcloud/test_init.py @@ -0,0 +1,95
@@ +"""Tests for the Nextcloud init.""" + +from unittest.mock import Mock, patch + +from nextcloudmonitor import ( + NextcloudMonitorAuthorizationError, + NextcloudMonitorConnectionError, + NextcloudMonitorError, + NextcloudMonitorRequestError, +) +import pytest + +from homeassistant.components.nextcloud.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import CONF_URL, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import init_integration, mock_config_entry +from .const import MOCKED_ENTRY_ID, NC_DATA, VALID_CONFIG + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, +) -> None: + """Test a successful setup entry.""" + assert await init_integration(hass, VALID_CONFIG, NC_DATA) + + +async def test_unique_id_migration( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, +) -> None: + """Test migration of unique ids to stable ones.""" + + object_id = "my_nc_url_local_system_version" + entity_id = f"{Platform.SENSOR}.{object_id}" + + entry = mock_config_entry(VALID_CONFIG) + entry.add_to_hass(hass) + + entity = entity_registry.async_get_or_create( + Platform.SENSOR, + DOMAIN, + f"{VALID_CONFIG[CONF_URL]}#nextcloud_system_version", + suggested_object_id=object_id, + config_entry=entry, + ) + + # test old unique id + assert entity.entity_id == entity_id + assert entity.unique_id == f"{VALID_CONFIG[CONF_URL]}#nextcloud_system_version" + + with ( + patch( + "homeassistant.components.nextcloud.NextcloudMonitor" + ) as mock_nextcloud_monitor, + ): + mock_nextcloud_monitor.update = Mock(return_value=True) + mock_nextcloud_monitor.return_value.data = NC_DATA + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + # test migrated unique id + reg_entry = entity_registry.async_get(entity_id) + assert reg_entry.unique_id == f"{MOCKED_ENTRY_ID}#system_version" + + +@pytest.mark.parametrize( + ("exception", "expected_entry_state"), + [ + (NextcloudMonitorAuthorizationError, ConfigEntryState.SETUP_ERROR), + (NextcloudMonitorConnectionError, ConfigEntryState.SETUP_RETRY), + (NextcloudMonitorRequestError, ConfigEntryState.SETUP_RETRY), + ], +) +async def test_setup_entry_errors( + hass: HomeAssistant, + exception: NextcloudMonitorError, + expected_entry_state: ConfigEntryState, +) -> None: + """Test setup entry errors.""" + + entry = mock_config_entry(VALID_CONFIG) + entry.add_to_hass(hass) + + with ( + patch( + "homeassistant.components.nextcloud.NextcloudMonitor", side_effect=exception + ), + ): + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + assert entry.state == expected_entry_state diff --git a/tests/components/nextcloud/test_sensor.py b/tests/components/nextcloud/test_sensor.py new file mode 100644 index 00000000000..1ea2c87db11 --- /dev/null +++ b/tests/components/nextcloud/test_sensor.py @@ -0,0 +1,31 @@ +"""Tests for the Nextcloud sensors.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import init_integration +from .const import NC_DATA, VALID_CONFIG + +from tests.common import snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a successful setup entry.""" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.SENSOR]): + entry = await init_integration(hass, VALID_CONFIG, NC_DATA) + + states = hass.states.async_all() + assert len(states) == 80 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) diff --git a/tests/components/nextcloud/test_update.py b/tests/components/nextcloud/test_update.py new file mode 100644 index 00000000000..d47c9f1df53 --- /dev/null +++ b/tests/components/nextcloud/test_update.py @@ -0,0 +1,80 @@ +"""Tests for the Nextcloud update entity.""" + +from copy import deepcopy +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import init_integration +from .const import NC_DATA, VALID_CONFIG + +from tests.common import snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_async_setup_entry( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test a successful setup entry.""" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + entry = await init_integration(hass, VALID_CONFIG, NC_DATA) + + states = hass.states.async_all() + assert len(states) == 1 + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_setup_entity_without_update( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test update entity is created w/o available update.""" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + await init_integration(hass, VALID_CONFIG, NC_DATA) + + states = hass.states.async_all() + assert len(states) == 1 + assert states[0].state == STATE_OFF + assert states[0].attributes["installed_version"] == "28.0.4.1" + assert states[0].attributes["latest_version"] == "28.0.4.1" + assert ( + states[0].attributes["release_url"] == "https://nextcloud.com/changelog/#28-0-4" + ) + + +async def test_setup_entity_with_update( + hass: HomeAssistant, snapshot: SnapshotAssertion +) -> None: + """Test update entity is created with available update.""" + data = deepcopy(NC_DATA) + data["nextcloud"]["system"]["update"]["available"] = True + data["nextcloud"]["system"]["update"]["available_version"] = "30.0.0.0" + with patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + await init_integration(hass, VALID_CONFIG, data) + + states = hass.states.async_all() + assert len(states) == 1 + assert states[0].state == STATE_ON + assert states[0].attributes["installed_version"] == "28.0.4.1" + assert states[0].attributes["latest_version"] == "30.0.0.0" + assert ( + states[0].attributes["release_url"] == "https://nextcloud.com/changelog/#30-0-0" + ) + + +async def test_setup_no_entity(hass: HomeAssistant) -> None: + """Test no update entity is created, when no data available.""" + data = deepcopy(NC_DATA) + data["nextcloud"]["system"].pop("update") # only nc<28.0.0 + with 
patch("homeassistant.components.nextcloud.PLATFORMS", [Platform.UPDATE]): + await init_integration(hass, VALID_CONFIG, data) + + states = hass.states.async_all() + assert len(states) == 0 diff --git a/tests/components/nextdns/snapshots/test_binary_sensor.ambr b/tests/components/nextdns/snapshots/test_binary_sensor.ambr index bd4ecbba084..814b4c1ac16 100644 --- a/tests/components/nextdns/snapshots/test_binary_sensor.ambr +++ b/tests/components/nextdns/snapshots/test_binary_sensor.ambr @@ -1,1095 +1,4 @@ # serializer version: 1 -# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AI-Driven threat detection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ai_threat_detection', - 'unique_id': 'xyz12_ai_threat_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_ai_driven_threat_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile AI-Driven threat detection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow affiliate & tracking links', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'allow_affiliate', - 'unique_id': 'xyz12_allow_affiliate', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_allow_affiliate_tracking_links-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Allow affiliate & tracking links', - }), - 'context': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 
'original_name': 'Anonymized EDNS client subnet', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'anonymized_ecs', - 'unique_id': 'xyz12_anonymized_ecs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_anonymized_edns_client_subnet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', - }), - 'context': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_9gag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block 9GAG', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_9gag', - 'unique_id': 'xyz12_block_9gag', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block bypass methods', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bypass_methods', - 'unique_id': 'xyz12_block_bypass_methods', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_bypass_methods-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block bypass methods', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block child sexual abuse material', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_csam', - 'unique_id': 'xyz12_block_csam', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_child_sexual_abuse_material-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ 
- 'friendly_name': 'Fake Profile Block child sexual abuse material', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block disguised third-party trackers', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disguised_trackers', - 'unique_id': 'xyz12_block_disguised_trackers', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_disguised_third_party_trackers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block disguised third-party trackers', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dynamic DNS hostnames', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ddns', - 'unique_id': 'xyz12_block_ddns', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block newly registered domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_nrd', - 'unique_id': 'xyz12_block_nrd', - 'unit_of_measurement': None, - }) -# 
--- -# name: test_binary_Sensor[switch.fake_profile_block_newly_registered_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block newly registered domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_page-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_page', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block page', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_page', - 'unique_id': 'xyz12_block_page', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_page-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block page', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_page', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block parked domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_parked_domains', - 'unique_id': 'xyz12_block_parked_domains', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_block_parked_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block parked domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cache_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cache boost', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cache_boost', - 'unique_id': 'xyz12_cache_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cache_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'friendly_name': 'Fake Profile Cache boost', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cname_flattening-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CNAME flattening', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cname_flattening', - 'unique_id': 'xyz12_cname_flattening', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cname_flattening-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile CNAME flattening', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cryptojacking protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cryptojacking_protection', - 'unique_id': 'xyz12_cryptojacking_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_cryptojacking_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cryptojacking protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS rebinding protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dns_rebinding_protection', - 'unique_id': 'xyz12_dns_rebinding_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_dns_rebinding_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake 
Profile DNS rebinding protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domain generation algorithms protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dga_protection', - 'unique_id': 'xyz12_dga_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_domain_generation_algorithms_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Domain generation algorithms protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_force_safesearch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force SafeSearch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'safesearch', - 'unique_id': 'xyz12_safesearch', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_force_safesearch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force SafeSearch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force YouTube restricted mode', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'youtube_restricted_mode', - 'unique_id': 'xyz12_youtube_restricted_mode', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_binary_Sensor[switch.fake_profile_force_youtube_restricted_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force YouTube restricted mode', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Google safe browsing', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'google_safe_browsing', - 'unique_id': 'xyz12_google_safe_browsing', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_google_safe_browsing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Google safe browsing', - }), - 'context': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IDN homograph attacks protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'idn_homograph_attacks_protection', - 'unique_id': 'xyz12_idn_homograph_attacks_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_idn_homograph_attacks_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IDN homograph attacks protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'logs', - 'unique_id': 'xyz12_logs', - 'unit_of_measurement': None, - }) -# 
--- -# name: test_binary_Sensor[switch.fake_profile_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Logs', - }), - 'context': , - 'entity_id': 'switch.fake_profile_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Threat intelligence feeds', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'threat_intelligence_feeds', - 'unique_id': 'xyz12_threat_intelligence_feeds', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_threat_intelligence_feeds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Threat intelligence feeds', - }), - 'context': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Typosquatting protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'typosquatting_protection', - 'unique_id': 'xyz12_typosquatting_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_typosquatting_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Typosquatting protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_web3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_web3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Web3', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'web3', - 'unique_id': 'xyz12_web3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_Sensor[switch.fake_profile_web3-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Web3', - }), - 'context': , - 'entity_id': 'switch.fake_profile_web3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_binary_sensor[binary_sensor.fake_profile_device_connection_status-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1184,1094 +93,3 @@ 'state': 'off', }) # --- -# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'AI-Driven threat detection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ai_threat_detection', - 'unique_id': 'xyz12_ai_threat_detection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_ai_driven_threat_detection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile AI-Driven threat detection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_ai_driven_threat_detection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Allow affiliate & tracking links', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'allow_affiliate', - 'unique_id': 'xyz12_allow_affiliate', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_allow_affiliate_tracking_links-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Allow affiliate & tracking links', - }), - 'context': , - 'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Anonymized EDNS client subnet', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 
0, - 'translation_key': 'anonymized_ecs', - 'unique_id': 'xyz12_anonymized_ecs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', - }), - 'context': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_9gag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': , - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block 9GAG', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_9gag', - 'unique_id': 'xyz12_block_9gag', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block bypass methods', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bypass_methods', - 'unique_id': 'xyz12_block_bypass_methods', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_bypass_methods-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block bypass methods', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block child sexual abuse material', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_csam', - 'unique_id': 'xyz12_block_csam', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block child sexual abuse material', - }), - 'context': , - 'entity_id': 
'switch.fake_profile_block_child_sexual_abuse_material', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block disguised third-party trackers', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disguised_trackers', - 'unique_id': 'xyz12_block_disguised_trackers', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block disguised third-party trackers', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dynamic DNS hostnames', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ddns', - 'unique_id': 'xyz12_block_ddns', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block newly registered domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_nrd', - 'unique_id': 'xyz12_block_nrd', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_newly_registered_domains-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block newly registered domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_page-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_page', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block page', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_page', - 'unique_id': 'xyz12_block_page', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_page-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block page', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_page', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_parked_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block parked domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_parked_domains', - 'unique_id': 'xyz12_block_parked_domains', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_block_parked_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block parked domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cache_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cache boost', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cache_boost', - 'unique_id': 'xyz12_cache_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cache_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cache boost', - }), - 'context': , - 'entity_id': 
'switch.fake_profile_cache_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cname_flattening-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CNAME flattening', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cname_flattening', - 'unique_id': 'xyz12_cname_flattening', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cname_flattening-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile CNAME flattening', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cryptojacking protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cryptojacking_protection', - 'unique_id': 'xyz12_cryptojacking_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_cryptojacking_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cryptojacking protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS rebinding protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dns_rebinding_protection', - 'unique_id': 'xyz12_dns_rebinding_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_dns_rebinding_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS rebinding protection', - }), - 'context': , - 'entity_id': 
'switch.fake_profile_dns_rebinding_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domain generation algorithms protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dga_protection', - 'unique_id': 'xyz12_dga_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Domain generation algorithms protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_safesearch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force SafeSearch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'safesearch', - 'unique_id': 'xyz12_safesearch', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_safesearch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force SafeSearch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force YouTube restricted mode', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'youtube_restricted_mode', - 'unique_id': 'xyz12_youtube_restricted_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_force_youtube_restricted_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 
'Fake Profile Force YouTube restricted mode', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Google safe browsing', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'google_safe_browsing', - 'unique_id': 'xyz12_google_safe_browsing', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_google_safe_browsing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Google safe browsing', - }), - 'context': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IDN homograph attacks protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'idn_homograph_attacks_protection', - 'unique_id': 'xyz12_idn_homograph_attacks_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IDN homograph attacks protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'logs', - 'unique_id': 'xyz12_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake 
Profile Logs', - }), - 'context': , - 'entity_id': 'switch.fake_profile_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Threat intelligence feeds', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'threat_intelligence_feeds', - 'unique_id': 'xyz12_threat_intelligence_feeds', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_threat_intelligence_feeds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Threat intelligence feeds', - }), - 'context': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Typosquatting protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'typosquatting_protection', - 'unique_id': 'xyz12_typosquatting_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_typosquatting_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Typosquatting protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_binary_sensor[switch.fake_profile_web3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_web3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Web3', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'web3', - 'unique_id': 'xyz12_web3', - 'unit_of_measurement': None, - }) -# --- -# name: test_binary_sensor[switch.fake_profile_web3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Web3', - }), - 'context': , - 'entity_id': 'switch.fake_profile_web3', - 
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'on',
-  })
-# ---
diff --git a/tests/components/nextdns/snapshots/test_sensor.ambr b/tests/components/nextdns/snapshots/test_sensor.ambr
index 34b40433e3b..14bebea53f8 100644
--- a/tests/components/nextdns/snapshots/test_sensor.ambr
+++ b/tests/components/nextdns/snapshots/test_sensor.ambr
@@ -1,144 +1,4 @@
 # serializer version: 1
-# name: test_sensor[binary_sensor.fake_profile_device_connection_status-entry]
-  EntityRegistryEntrySnapshot({
-    'aliases': set({
-    }),
-    'area_id': None,
-    'capabilities': None,
-    'config_entry_id': ,
-    'device_class': None,
-    'device_id': ,
-    'disabled_by': None,
-    'domain': 'binary_sensor',
-    'entity_category': ,
-    'entity_id': 'binary_sensor.fake_profile_device_connection_status',
-    'has_entity_name': True,
-    'hidden_by': None,
-    'icon': None,
-    'id': ,
-    'labels': set({
-    }),
-    'name': None,
-    'options': dict({
-    }),
-    'original_device_class': ,
-    'original_icon': None,
-    'original_name': 'Device connection status',
-    'platform': 'nextdns',
-    'previous_unique_id': None,
-    'supported_features': 0,
-    'translation_key': 'device_connection_status',
-    'unique_id': 'xyz12_this_device_nextdns_connection_status',
-    'unit_of_measurement': None,
-  })
-# ---
-# name: test_sensor[binary_sensor.fake_profile_device_connection_status-state]
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'device_class': 'connectivity',
-      'friendly_name': 'Fake Profile Device connection status',
-    }),
-    'context': ,
-    'entity_id': 'binary_sensor.fake_profile_device_connection_status',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'on',
-  })
-# ---
-# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-entry]
-  EntityRegistryEntrySnapshot({
-    'aliases': set({
-    }),
-    'area_id': None,
-    'capabilities': None,
-    'config_entry_id': ,
-    'device_class': None,
-    'device_id': ,
-    'disabled_by': None,
-    'domain': 'binary_sensor',
-    'entity_category': ,
-    'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status',
-    'has_entity_name': True,
-    'hidden_by': None,
-    'icon': None,
-    'id': ,
-    'labels': set({
-    }),
-    'name': None,
-    'options': dict({
-    }),
-    'original_device_class': ,
-    'original_icon': None,
-    'original_name': 'Device profile connection status',
-    'platform': 'nextdns',
-    'previous_unique_id': None,
-    'supported_features': 0,
-    'translation_key': 'device_profile_connection_status',
-    'unique_id': 'xyz12_this_device_profile_connection_status',
-    'unit_of_measurement': None,
-  })
-# ---
-# name: test_sensor[binary_sensor.fake_profile_device_profile_connection_status-state]
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'device_class': 'connectivity',
-      'friendly_name': 'Fake Profile Device profile connection status',
-    }),
-    'context': ,
-    'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'off',
-  })
-# ---
-# name: test_sensor[button.fake_profile_clear_logs-entry]
-  EntityRegistryEntrySnapshot({
-    'aliases': set({
-    }),
-    'area_id': None,
-    'capabilities': None,
-    'config_entry_id': ,
-    'device_class': None,
-    'device_id': ,
-    'disabled_by': None,
-    'domain': 'button',
-    'entity_category': ,
-    'entity_id': 'button.fake_profile_clear_logs',
-    'has_entity_name': True,
-    'hidden_by': None,
-    'icon': None,
-    'id': ,
-    'labels': set({
-    }),
-    'name': None,
-    'options': dict({
-    }),
-    'original_device_class': None,
-    'original_icon': None,
-    'original_name': 'Clear logs',
-    'platform': 'nextdns',
-    'previous_unique_id': None,
-    'supported_features': 0,
-    'translation_key': 'clear_logs',
-    'unique_id': 'xyz12_clear_logs',
-    'unit_of_measurement': None,
-  })
-# ---
-# name: test_sensor[button.fake_profile_clear_logs-state]
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'friendly_name': 'Fake Profile Clear logs',
-    }),
-    'context': ,
-    'entity_id': 'button.fake_profile_clear_logs',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'unknown',
-  })
-# ---
 # name: test_sensor[sensor.fake_profile_dns_over_http_3_queries-entry]
   EntityRegistryEntrySnapshot({
     'aliases': set({
@@ -1389,3361 +1249,3 @@
     'state': '40',
   })
 # ---
-# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-entry]
-  EntityRegistryEntrySnapshot({
-    'aliases': set({
-    }),
-    'area_id': None,
-    'capabilities': None,
-    'config_entry_id': ,
-    'device_class': None,
-    'device_id': ,
-    'disabled_by': None,
-    'domain': 'switch',
-    'entity_category': ,
-    'entity_id': 'switch.fake_profile_ai_driven_threat_detection',
-    'has_entity_name': True,
-    'hidden_by': None,
-    'icon': None,
-    'id': ,
-    'labels': set({
-    }),
-    'name': None,
-    'options': dict({
-    }),
-    'original_device_class': None,
-    'original_icon': None,
-    'original_name': 'AI-Driven threat detection',
-    'platform': 'nextdns',
-    'previous_unique_id': None,
-    'supported_features': 0,
-    'translation_key': 'ai_threat_detection',
-    'unique_id': 'xyz12_ai_threat_detection',
-    'unit_of_measurement': None,
-  })
-# ---
-# name: test_sensor[switch.fake_profile_ai_driven_threat_detection-state]
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'friendly_name': 'Fake Profile AI-Driven threat detection',
-    }),
-    'context': ,
-    'entity_id': 'switch.fake_profile_ai_driven_threat_detection',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'on',
-  })
-# ---
-# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-entry]
-  EntityRegistryEntrySnapshot({
-    'aliases': set({
-    }),
-    'area_id': None,
-    'capabilities': None,
-    'config_entry_id': ,
-    'device_class': None,
-    'device_id': ,
-    'disabled_by': None,
-    'domain': 'switch',
-    'entity_category': ,
-    'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links',
-    'has_entity_name': True,
-    'hidden_by': None,
-    'icon': None,
-    'id': ,
-    'labels': set({
-    }),
-    'name': None,
-    'options': dict({
-    }),
-    'original_device_class': None,
-    'original_icon': None,
-    'original_name': 'Allow affiliate & tracking links',
-    'platform': 'nextdns',
-    'previous_unique_id': None,
-    'supported_features': 0,
-    'translation_key': 'allow_affiliate',
-    'unique_id': 'xyz12_allow_affiliate',
-    'unit_of_measurement': None,
-  })
-# ---
-# name: test_sensor[switch.fake_profile_allow_affiliate_tracking_links-state]
-  StateSnapshot({
-    'attributes': ReadOnlyDict({
-      'friendly_name': 'Fake Profile Allow affiliate & tracking links',
-    }),
-    'context': ,
-    'entity_id': 'switch.fake_profile_allow_affiliate_tracking_links',
-    'last_changed': ,
-    'last_reported': ,
-    'last_updated': ,
-    'state': 'on',
-  })
-# ---
-# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-entry]
-  EntityRegistryEntrySnapshot({
-    'aliases': set({
-    }),
-    'area_id': None,
-    'capabilities': None,
-    'config_entry_id': ,
-    'device_class': None,
-    'device_id': ,
-    'disabled_by': None,
-    'domain': 'switch',
-    'entity_category': ,
-    'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet',
-    'has_entity_name': True,
-    'hidden_by': None,
'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Anonymized EDNS client subnet', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'anonymized_ecs', - 'unique_id': 'xyz12_anonymized_ecs', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_anonymized_edns_client_subnet-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Anonymized EDNS client subnet', - }), - 'context': , - 'entity_id': 'switch.fake_profile_anonymized_edns_client_subnet', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_9gag-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block 9GAG', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_9gag', - 'unique_id': 'xyz12_block_9gag', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_9gag-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block 9GAG', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_9gag', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_amazon-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_amazon', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Amazon', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_amazon', - 'unique_id': 'xyz12_block_amazon', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_amazon-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Amazon', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_amazon', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_bereal-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bereal', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block BeReal', - 'platform': 'nextdns', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bereal', - 'unique_id': 'xyz12_block_bereal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_bereal-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block BeReal', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bereal', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_blizzard-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_blizzard', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Blizzard', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_blizzard', - 'unique_id': 'xyz12_block_blizzard', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_blizzard-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Blizzard', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_blizzard', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_bypass_methods-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block bypass methods', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_bypass_methods', - 'unique_id': 'xyz12_block_bypass_methods', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_bypass_methods-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block bypass methods', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_bypass_methods', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_chatgpt-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_chatgpt', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block ChatGPT', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_chatgpt', - 'unique_id': 'xyz12_block_chatgpt', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_sensor[switch.fake_profile_block_chatgpt-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block ChatGPT', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_chatgpt', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block child sexual abuse material', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_csam', - 'unique_id': 'xyz12_block_csam', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_child_sexual_abuse_material-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block child sexual abuse material', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_child_sexual_abuse_material', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_dailymotion-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dailymotion', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Dailymotion', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_dailymotion', - 'unique_id': 'xyz12_block_dailymotion', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_dailymotion-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Dailymotion', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dailymotion', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_dating-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dating', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dating', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_dating', - 'unique_id': 'xyz12_block_dating', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_dating-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ 
- 'friendly_name': 'Fake Profile Block dating', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dating', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_discord-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_discord', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Discord', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_discord', - 'unique_id': 'xyz12_block_discord', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_discord-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Discord', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_discord', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block disguised third-party trackers', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disguised_trackers', - 'unique_id': 'xyz12_block_disguised_trackers', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_disguised_third_party_trackers-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block disguised third-party trackers', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_disguised_third_party_trackers', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_disney_plus-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_disney_plus', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Disney Plus', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_disneyplus', - 'unique_id': 'xyz12_block_disneyplus', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_disney_plus-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Disney Plus', - }), - 
'context': , - 'entity_id': 'switch.fake_profile_block_disney_plus', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block dynamic DNS hostnames', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ddns', - 'unique_id': 'xyz12_block_ddns', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_dynamic_dns_hostnames-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block dynamic DNS hostnames', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_dynamic_dns_hostnames', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_ebay-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_ebay', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block eBay', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_ebay', - 'unique_id': 'xyz12_block_ebay', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_ebay-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block eBay', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_ebay', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_facebook-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_facebook', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Facebook', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_facebook', - 'unique_id': 'xyz12_block_facebook', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_facebook-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Facebook', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_facebook', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: 
test_sensor[switch.fake_profile_block_fortnite-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_fortnite', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Fortnite', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_fortnite', - 'unique_id': 'xyz12_block_fortnite', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_fortnite-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Fortnite', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_fortnite', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_gambling-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_gambling', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block gambling', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_gambling', - 'unique_id': 'xyz12_block_gambling', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_gambling-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block gambling', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_gambling', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_google_chat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_google_chat', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Google Chat', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_google_chat', - 'unique_id': 'xyz12_block_google_chat', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_google_chat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Google Chat', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_google_chat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_hbo_max-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': 
None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_hbo_max', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block HBO Max', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_hbomax', - 'unique_id': 'xyz12_block_hbomax', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_hbo_max-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block HBO Max', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_hbo_max', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_hulu-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_hulu', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Hulu', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'xyz12_block_hulu', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_hulu-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Hulu', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_hulu', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_imgur-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_imgur', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Imgur', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_imgur', - 'unique_id': 'xyz12_block_imgur', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_imgur-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Imgur', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_imgur', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_instagram-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_instagram', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Instagram', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_instagram', - 'unique_id': 'xyz12_block_instagram', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_instagram-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Instagram', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_instagram', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_league_of_legends-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_league_of_legends', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block League of Legends', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_leagueoflegends', - 'unique_id': 'xyz12_block_leagueoflegends', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_league_of_legends-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block League of Legends', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_league_of_legends', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_mastodon-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_mastodon', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Mastodon', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_mastodon', - 'unique_id': 'xyz12_block_mastodon', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_mastodon-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Mastodon', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_mastodon', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_messenger-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_messenger', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Messenger', - 'platform': 'nextdns', - 'previous_unique_id': 
None, - 'supported_features': 0, - 'translation_key': 'block_messenger', - 'unique_id': 'xyz12_block_messenger', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_messenger-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Messenger', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_messenger', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_minecraft-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_minecraft', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Minecraft', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_minecraft', - 'unique_id': 'xyz12_block_minecraft', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_minecraft-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Minecraft', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_minecraft', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_netflix-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_netflix', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Netflix', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_netflix', - 'unique_id': 'xyz12_block_netflix', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_netflix-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Netflix', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_netflix', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_newly_registered_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block newly registered domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_nrd', - 'unique_id': 'xyz12_block_nrd', - 'unit_of_measurement': None, - }) -# --- -# name: 
test_sensor[switch.fake_profile_block_newly_registered_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block newly registered domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_newly_registered_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_online_gaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_online_gaming', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block online gaming', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_online_gaming', - 'unique_id': 'xyz12_block_online_gaming', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_online_gaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block online gaming', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_online_gaming', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_page-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_page', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block page', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_page', - 'unique_id': 'xyz12_block_page', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_page-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block page', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_page', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_block_parked_domains-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block parked domains', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_parked_domains', - 'unique_id': 'xyz12_block_parked_domains', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_parked_domains-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 
'Fake Profile Block parked domains', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_parked_domains', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_pinterest-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_pinterest', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Pinterest', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_pinterest', - 'unique_id': 'xyz12_block_pinterest', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_pinterest-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Pinterest', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_pinterest', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_piracy-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_piracy', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block piracy', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_piracy', - 'unique_id': 'xyz12_block_piracy', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_piracy-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block piracy', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_piracy', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_playstation_network-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_playstation_network', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block PlayStation Network', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_playstation_network', - 'unique_id': 'xyz12_block_playstation_network', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_playstation_network-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block PlayStation Network', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_playstation_network', - 'last_changed': , - 
'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_porn-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_porn', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block porn', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_porn', - 'unique_id': 'xyz12_block_porn', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_porn-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block porn', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_porn', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_prime_video-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_prime_video', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Prime Video', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_primevideo', - 'unique_id': 'xyz12_block_primevideo', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_prime_video-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Prime Video', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_prime_video', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_reddit-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_reddit', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Reddit', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_reddit', - 'unique_id': 'xyz12_block_reddit', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_reddit-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Reddit', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_reddit', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_roblox-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 
'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_roblox', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Roblox', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_roblox', - 'unique_id': 'xyz12_block_roblox', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_roblox-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Roblox', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_roblox', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_signal-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_signal', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Signal', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_signal', - 'unique_id': 'xyz12_block_signal', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_signal-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Signal', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_signal', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_skype-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_skype', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Skype', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_skype', - 'unique_id': 'xyz12_block_skype', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_skype-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Skype', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_skype', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_snapchat-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_snapchat', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': 
set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Snapchat', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_snapchat', - 'unique_id': 'xyz12_block_snapchat', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_snapchat-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Snapchat', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_snapchat', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_social_networks-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_social_networks', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block social networks', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_social_networks', - 'unique_id': 'xyz12_block_social_networks', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_social_networks-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block social networks', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_social_networks', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_spotify-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_spotify', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Spotify', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_spotify', - 'unique_id': 'xyz12_block_spotify', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_spotify-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Spotify', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_spotify', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_steam-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_steam', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Steam', - 'platform': 'nextdns', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_steam', - 'unique_id': 'xyz12_block_steam', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_steam-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Steam', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_steam', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_telegram-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_telegram', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Telegram', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_telegram', - 'unique_id': 'xyz12_block_telegram', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_telegram-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Telegram', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_telegram', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_tiktok-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_tiktok', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block TikTok', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_tiktok', - 'unique_id': 'xyz12_block_tiktok', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_tiktok-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block TikTok', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_tiktok', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_tinder-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_tinder', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Tinder', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_tinder', - 'unique_id': 'xyz12_block_tinder', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_tinder-state] - StateSnapshot({ - 
'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Tinder', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_tinder', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_tumblr-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_tumblr', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Tumblr', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_tumblr', - 'unique_id': 'xyz12_block_tumblr', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_tumblr-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Tumblr', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_tumblr', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_twitch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_twitch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Twitch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_twitch', - 'unique_id': 'xyz12_block_twitch', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_twitch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Twitch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_twitch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_video_streaming-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_video_streaming', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block video streaming', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_video_streaming', - 'unique_id': 'xyz12_block_video_streaming', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_video_streaming-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block video streaming', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_video_streaming', - 'last_changed': , - 'last_reported': , - 
'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_vimeo-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_vimeo', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Vimeo', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_vimeo', - 'unique_id': 'xyz12_block_vimeo', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_vimeo-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Vimeo', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_vimeo', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_vk-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_vk', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block VK', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_vk', - 'unique_id': 'xyz12_block_vk', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_vk-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block VK', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_vk', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_whatsapp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_whatsapp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block WhatsApp', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_whatsapp', - 'unique_id': 'xyz12_block_whatsapp', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_whatsapp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block WhatsApp', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_whatsapp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block X (formerly Twitter)', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_twitter', - 'unique_id': 'xyz12_block_twitter', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_x_formerly_twitter-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block X (formerly Twitter)', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_x_formerly_twitter', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_xbox_live-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_xbox_live', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Xbox Live', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_xboxlive', - 'unique_id': 'xyz12_block_xboxlive', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_xbox_live-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Xbox Live', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_xbox_live', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_youtube-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_youtube', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block YouTube', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_youtube', - 'unique_id': 'xyz12_block_youtube', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_youtube-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block YouTube', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_youtube', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_block_zoom-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_block_zoom', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': 
None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Block Zoom', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'block_zoom', - 'unique_id': 'xyz12_block_zoom', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_block_zoom-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Block Zoom', - }), - 'context': , - 'entity_id': 'switch.fake_profile_block_zoom', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_cache_boost-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cache boost', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cache_boost', - 'unique_id': 'xyz12_cache_boost', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_cache_boost-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cache boost', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cache_boost', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_cname_flattening-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'CNAME flattening', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cname_flattening', - 'unique_id': 'xyz12_cname_flattening', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_cname_flattening-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile CNAME flattening', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cname_flattening', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_cryptojacking_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Cryptojacking protection', - 'platform': 'nextdns', - 
'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'cryptojacking_protection', - 'unique_id': 'xyz12_cryptojacking_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_cryptojacking_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Cryptojacking protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_cryptojacking_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_dns_rebinding_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS rebinding protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dns_rebinding_protection', - 'unique_id': 'xyz12_dns_rebinding_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_dns_rebinding_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS rebinding protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_dns_rebinding_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Domain generation algorithms protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dga_protection', - 'unique_id': 'xyz12_dga_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_domain_generation_algorithms_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Domain generation algorithms protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_domain_generation_algorithms_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_force_safesearch-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': 
None, - 'original_name': 'Force SafeSearch', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'safesearch', - 'unique_id': 'xyz12_safesearch', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_force_safesearch-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force SafeSearch', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_safesearch', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Force YouTube restricted mode', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'youtube_restricted_mode', - 'unique_id': 'xyz12_youtube_restricted_mode', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_force_youtube_restricted_mode-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Force YouTube restricted mode', - }), - 'context': , - 'entity_id': 'switch.fake_profile_force_youtube_restricted_mode', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_google_safe_browsing-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Google safe browsing', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'google_safe_browsing', - 'unique_id': 'xyz12_google_safe_browsing', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_google_safe_browsing-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Google safe browsing', - }), - 'context': , - 'entity_id': 'switch.fake_profile_google_safe_browsing', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 
'original_name': 'IDN homograph attacks protection', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'idn_homograph_attacks_protection', - 'unique_id': 'xyz12_idn_homograph_attacks_protection', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_idn_homograph_attacks_protection-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IDN homograph attacks protection', - }), - 'context': , - 'entity_id': 'switch.fake_profile_idn_homograph_attacks_protection', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'logs', - 'unique_id': 'xyz12_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Logs', - }), - 'context': , - 'entity_id': 'switch.fake_profile_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Threat intelligence feeds', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'threat_intelligence_feeds', - 'unique_id': 'xyz12_threat_intelligence_feeds', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.fake_profile_threat_intelligence_feeds-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Threat intelligence feeds', - }), - 'context': , - 'entity_id': 'switch.fake_profile_threat_intelligence_feeds', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.fake_profile_typosquatting_protection-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.fake_profile_typosquatting_protection', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Typosquatting protection', - 'platform': 'nextdns', - 
'previous_unique_id': None,
- 'supported_features': 0,
- 'translation_key': 'typosquatting_protection',
- 'unique_id': 'xyz12_typosquatting_protection',
- 'unit_of_measurement': None,
- })
-# ---
-# name: test_sensor[switch.fake_profile_typosquatting_protection-state]
- StateSnapshot({
- 'attributes': ReadOnlyDict({
- 'friendly_name': 'Fake Profile Typosquatting protection',
- }),
- 'context': ,
- 'entity_id': 'switch.fake_profile_typosquatting_protection',
- 'last_changed': ,
- 'last_reported': ,
- 'last_updated': ,
- 'state': 'on',
- })
-# ---
-# name: test_sensor[switch.fake_profile_web3-entry]
- EntityRegistryEntrySnapshot({
- 'aliases': set({
- }),
- 'area_id': None,
- 'capabilities': None,
- 'config_entry_id': ,
- 'device_class': None,
- 'device_id': ,
- 'disabled_by': None,
- 'domain': 'switch',
- 'entity_category': ,
- 'entity_id': 'switch.fake_profile_web3',
- 'has_entity_name': True,
- 'hidden_by': None,
- 'icon': None,
- 'id': ,
- 'labels': set({
- }),
- 'name': None,
- 'options': dict({
- }),
- 'original_device_class': None,
- 'original_icon': None,
- 'original_name': 'Web3',
- 'platform': 'nextdns',
- 'previous_unique_id': None,
- 'supported_features': 0,
- 'translation_key': 'web3',
- 'unique_id': 'xyz12_web3',
- 'unit_of_measurement': None,
- })
-# ---
-# name: test_sensor[switch.fake_profile_web3-state]
- StateSnapshot({
- 'attributes': ReadOnlyDict({
- 'friendly_name': 'Fake Profile Web3',
- }),
- 'context': ,
- 'entity_id': 'switch.fake_profile_web3',
- 'last_changed': ,
- 'last_reported': ,
- 'last_updated': ,
- 'state': 'on',
- })
-# ---
diff --git a/tests/components/nextdns/snapshots/test_switch.ambr b/tests/components/nextdns/snapshots/test_switch.ambr
index 8472f02e8c5..3328e341a2e 100644
--- a/tests/components/nextdns/snapshots/test_switch.ambr
+++ b/tests/components/nextdns/snapshots/test_switch.ambr
@@ -1,1394 +1,4 @@
 # serializer version: 1
-# name: test_switch[binary_sensor.fake_profile_device_connection_status-entry]
- EntityRegistryEntrySnapshot({
- 'aliases': set({
- }),
- 'area_id': None,
- 'capabilities': None,
- 'config_entry_id': ,
- 'device_class': None,
- 'device_id': ,
- 'disabled_by': None,
- 'domain': 'binary_sensor',
- 'entity_category': ,
- 'entity_id': 'binary_sensor.fake_profile_device_connection_status',
- 'has_entity_name': True,
- 'hidden_by': None,
- 'icon': None,
- 'id': ,
- 'labels': set({
- }),
- 'name': None,
- 'options': dict({
- }),
- 'original_device_class': ,
- 'original_icon': None,
- 'original_name': 'Device connection status',
- 'platform': 'nextdns',
- 'previous_unique_id': None,
- 'supported_features': 0,
- 'translation_key': 'device_connection_status',
- 'unique_id': 'xyz12_this_device_nextdns_connection_status',
- 'unit_of_measurement': None,
- })
-# ---
-# name: test_switch[binary_sensor.fake_profile_device_connection_status-state]
- StateSnapshot({
- 'attributes': ReadOnlyDict({
- 'device_class': 'connectivity',
- 'friendly_name': 'Fake Profile Device connection status',
- }),
- 'context': ,
- 'entity_id': 'binary_sensor.fake_profile_device_connection_status',
- 'last_changed': ,
- 'last_reported': ,
- 'last_updated': ,
- 'state': 'on',
- })
-# ---
-# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-entry]
- EntityRegistryEntrySnapshot({
- 'aliases': set({
- }),
- 'area_id': None,
- 'capabilities': None,
- 'config_entry_id': ,
- 'device_class': None,
- 'device_id': ,
- 'disabled_by': None,
- 'domain': 'binary_sensor',
- 'entity_category': ,
- 'entity_id': 
'binary_sensor.fake_profile_device_profile_connection_status', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Device profile connection status', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'device_profile_connection_status', - 'unique_id': 'xyz12_this_device_profile_connection_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[binary_sensor.fake_profile_device_profile_connection_status-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': 'Fake Profile Device profile connection status', - }), - 'context': , - 'entity_id': 'binary_sensor.fake_profile_device_profile_connection_status', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- -# name: test_switch[button.fake_profile_clear_logs-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.fake_profile_clear_logs', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Clear logs', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'clear_logs', - 'unique_id': 'xyz12_clear_logs', - 'unit_of_measurement': None, - }) -# --- -# name: test_switch[button.fake_profile_clear_logs-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Clear logs', - }), - 'context': , - 'entity_id': 'button.fake_profile_clear_logs', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTP/3 queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh3_queries', - 'unique_id': 'xyz12_doh3_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '15', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTP/3 queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh3_queries_ratio', - 'unique_id': 'xyz12_doh3_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_http_3_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTP/3 queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_http_3_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '13.0', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTPS queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh_queries', - 'unique_id': 'xyz12_doh_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTPS queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-HTTPS queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doh_queries_ratio', - 'unique_id': 'xyz12_doh_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_https_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-HTTPS queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_https_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 
'17.4', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-QUIC queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doq_queries', - 'unique_id': 'xyz12_doq_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-QUIC queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-QUIC queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'doq_queries_ratio', - 'unique_id': 'xyz12_doq_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_quic_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-QUIC queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_quic_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '8.7', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-TLS queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dot_queries', - 'unique_id': 'xyz12_dot_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-TLS queries', - 'state_class': , - 
'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '30', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS-over-TLS queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dot_queries_ratio', - 'unique_id': 'xyz12_dot_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_over_tls_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS-over-TLS queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_over_tls_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '26.1', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'all_queries', - 'unique_id': 'xyz12_all_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries blocked', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'blocked_queries', - 'unique_id': 'xyz12_blocked_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries blocked', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries blocked ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'blocked_queries_ratio', - 'unique_id': 'xyz12_blocked_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_blocked_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries blocked ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries_blocked_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '20.0', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_relayed-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dns_queries_relayed', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNS queries relayed', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'relayed_queries', - 'unique_id': 'xyz12_relayed_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dns_queries_relayed-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNS queries relayed', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dns_queries_relayed', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNSSEC not validated queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 
0, - 'translation_key': 'not_validated_queries', - 'unique_id': 'xyz12_not_validated_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_not_validated_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNSSEC not validated queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dnssec_not_validated_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '25', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNSSEC validated queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'validated_queries', - 'unique_id': 'xyz12_validated_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNSSEC validated queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '75', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'DNSSEC validated queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'validated_queries_ratio', - 'unique_id': 'xyz12_validated_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_dnssec_validated_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile DNSSEC validated queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_dnssec_validated_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '75.0', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_encrypted_queries', - 'has_entity_name': True, - 'hidden_by': 
None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Encrypted queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'encrypted_queries', - 'unique_id': 'xyz12_encrypted_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Encrypted queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_encrypted_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Encrypted queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'encrypted_queries_ratio', - 'unique_id': 'xyz12_encrypted_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_encrypted_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Encrypted queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_encrypted_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '60.0', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv4_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_ipv4_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IPv4 queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv4_queries', - 'unique_id': 'xyz12_ipv4_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv4_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IPv4 queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_ipv4_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '90', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 
'entity_category': , - 'entity_id': 'sensor.fake_profile_ipv6_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IPv6 queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv6_queries', - 'unique_id': 'xyz12_ipv6_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IPv6 queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_ipv6_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'IPv6 queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'ipv6_queries_ratio', - 'unique_id': 'xyz12_ipv6_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_ipv6_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile IPv6 queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_ipv6_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.0', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_tcp_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'TCP queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tcp_queries', - 'unique_id': 'xyz12_tcp_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile TCP queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_tcp_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': 
None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'TCP queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tcp_queries_ratio', - 'unique_id': 'xyz12_tcp_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_tcp_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile TCP queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_tcp_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.0', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_udp_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'UDP queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'udp_queries', - 'unique_id': 'xyz12_udp_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile UDP queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_udp_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries_ratio-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_udp_queries_ratio', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'UDP queries ratio', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'udp_queries_ratio', - 'unique_id': 'xyz12_udp_queries_ratio', - 'unit_of_measurement': '%', - }) -# --- -# name: test_switch[sensor.fake_profile_udp_queries_ratio-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile UDP queries ratio', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_udp_queries_ratio', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '34.8', - }) -# --- -# name: test_switch[sensor.fake_profile_unencrypted_queries-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 
'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': , - 'entity_id': 'sensor.fake_profile_unencrypted_queries', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Unencrypted queries', - 'platform': 'nextdns', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'unencrypted_queries', - 'unique_id': 'xyz12_unencrypted_queries', - 'unit_of_measurement': 'queries', - }) -# --- -# name: test_switch[sensor.fake_profile_unencrypted_queries-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Fake Profile Unencrypted queries', - 'state_class': , - 'unit_of_measurement': 'queries', - }), - 'context': , - 'entity_id': 'sensor.fake_profile_unencrypted_queries', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '40', - }) -# --- # name: test_switch[switch.fake_profile_ai_driven_threat_detection-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/nibe_heatpump/snapshots/test_climate.ambr b/tests/components/nibe_heatpump/snapshots/test_climate.ambr index fb3e2d1003b..2db9a813bff 100644 --- a/tests/components/nibe_heatpump/snapshots/test_climate.ambr +++ b/tests/components/nibe_heatpump/snapshots/test_climate.ambr @@ -97,12 +97,6 @@ 'state': 'unavailable', }) # --- -# name: test_active_accessory[Model.S320-s2-climate.climate_system_21][initial] - None -# --- -# name: test_active_accessory[Model.S320-s2-climate.climate_system_s1][initial] - None -# --- # name: test_basic[Model.F1155-s2-climate.climate_system_s2][cooling] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/nice_go/__init__.py b/tests/components/nice_go/__init__.py new file mode 100644 index 00000000000..0208795a12c --- /dev/null +++ b/tests/components/nice_go/__init__.py @@ -0,0 +1,22 @@ +"""Tests for the Nice G.O. integration.""" + +from unittest.mock import patch + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration( + hass: HomeAssistant, config_entry: MockConfigEntry, platforms: list[Platform] +) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + with patch( + "homeassistant.components.nice_go.PLATFORMS", + platforms, + ): + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/nice_go/conftest.py b/tests/components/nice_go/conftest.py new file mode 100644 index 00000000000..9ed3d0d19cf --- /dev/null +++ b/tests/components/nice_go/conftest.py @@ -0,0 +1,79 @@ +"""Common fixtures for the Nice G.O. 
tests.""" + +from collections.abc import Generator +from datetime import datetime +from unittest.mock import AsyncMock, patch + +from nice_go import Barrier, BarrierState, ConnectionState +import pytest + +from homeassistant.components.nice_go.const import ( + CONF_REFRESH_TOKEN, + CONF_REFRESH_TOKEN_CREATION_TIME, + DOMAIN, +) +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD + +from tests.common import MockConfigEntry, load_json_array_fixture + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.nice_go.async_setup_entry", + return_value=True, + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_nice_go() -> Generator[AsyncMock]: + """Mock a Nice G.O. client.""" + with ( + patch( + "homeassistant.components.nice_go.coordinator.NiceGOApi", + autospec=True, + ) as mock_client, + patch( + "homeassistant.components.nice_go.config_flow.NiceGOApi", + new=mock_client, + ), + ): + client = mock_client.return_value + client.authenticate.return_value = "test-refresh-token" + client.authenticate_refresh.return_value = None + client.id_token = None + client.get_all_barriers.return_value = [ + Barrier( + id=barrier["id"], + type=barrier["type"], + controlLevel=barrier["controlLevel"], + attr=barrier["attr"], + state=BarrierState( + **barrier["state"], + connectionState=ConnectionState(**barrier["connectionState"]), + ), + api=client, + ) + for barrier in load_json_array_fixture("get_all_barriers.json", DOMAIN) + ] + yield client + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Mock a config entry.""" + return MockConfigEntry( + domain=DOMAIN, + entry_id="acefdd4b3a4a0911067d1cf51414201e", + title="test-email", + data={ + CONF_EMAIL: "test-email", + CONF_PASSWORD: "test-password", + CONF_REFRESH_TOKEN: "test-refresh-token", + CONF_REFRESH_TOKEN_CREATION_TIME: datetime.now().timestamp(), + }, + version=1, + unique_id="test-email", + ) diff --git a/tests/components/nice_go/fixtures/device_state_update.json b/tests/components/nice_go/fixtures/device_state_update.json new file mode 100644 index 00000000000..53d89c5411b --- /dev/null +++ b/tests/components/nice_go/fixtures/device_state_update.json @@ -0,0 +1,21 @@ +{ + "data": { + "devicesStatesUpdateFeed": { + "receiver": "ORG/0:2372", + "item": { + "deviceId": "1", + "desired": "{\"key\":\"value\"}", + "reported": "{\"displayName\":\"Test Garage 1\",\"autoDisabled\":false,\"migrationStatus\":\"DONE\",\"deviceId\":\"1\",\"lightStatus\":\"0,100\",\"vcnMode\":false,\"deviceFwVersion\":\"1.2.3.4.5.6\",\"barrierStatus\":\"0,0,1,0,-1,0,3,0\"}", + "timestamp": 123, + "version": 123, + "connectionState": { + "connected": true, + "updatedTimestamp": "123", + "__typename": "DeviceConnectionState" + }, + "__typename": "DeviceState" + }, + "__typename": "DeviceStateUpdateNotice" + } + } +} diff --git a/tests/components/nice_go/fixtures/device_state_update_1.json b/tests/components/nice_go/fixtures/device_state_update_1.json new file mode 100644 index 00000000000..cc718e8b093 --- /dev/null +++ b/tests/components/nice_go/fixtures/device_state_update_1.json @@ -0,0 +1,21 @@ +{ + "data": { + "devicesStatesUpdateFeed": { + "receiver": "ORG/0:2372", + "item": { + "deviceId": "2", + "desired": "{\"key\":\"value\"}", + "reported": "{\"displayName\":\"Test Garage 
2\",\"autoDisabled\":false,\"migrationStatus\":\"DONE\",\"deviceId\":\"2\",\"lightStatus\":\"1,100\",\"vcnMode\":false,\"deviceFwVersion\":\"1.2.3.4.5.6\",\"barrierStatus\":\"1,100,2,0,-1,0,3,0\"}", + "timestamp": 123, + "version": 123, + "connectionState": { + "connected": true, + "updatedTimestamp": "123", + "__typename": "DeviceConnectionState" + }, + "__typename": "DeviceState" + }, + "__typename": "DeviceStateUpdateNotice" + } + } +} diff --git a/tests/components/nice_go/fixtures/get_all_barriers.json b/tests/components/nice_go/fixtures/get_all_barriers.json new file mode 100644 index 00000000000..adb0fb4bacd --- /dev/null +++ b/tests/components/nice_go/fixtures/get_all_barriers.json @@ -0,0 +1,64 @@ +[ + { + "id": "1", + "type": "WallStation", + "controlLevel": "Owner", + "attr": [ + { + "key": "organization", + "value": "test_organization" + } + ], + "state": { + "deviceId": "1", + "desired": { "key": "value" }, + "reported": { + "displayName": "Test Garage 1", + "autoDisabled": false, + "migrationStatus": "DONE", + "deviceId": "1", + "lightStatus": "1,100", + "vcnMode": false, + "deviceFwVersion": "1.2.3.4.5.6", + "barrierStatus": "0,0,0,0,-1,0,3,0" + }, + "timestamp": null, + "version": null + }, + "connectionState": { + "connected": true, + "updatedTimestamp": "123" + } + }, + { + "id": "2", + "type": "WallStation", + "controlLevel": "Owner", + "attr": [ + { + "key": "organization", + "value": "test_organization" + } + ], + "state": { + "deviceId": "2", + "desired": { "key": "value" }, + "reported": { + "displayName": "Test Garage 2", + "autoDisabled": false, + "migrationStatus": "DONE", + "deviceId": "2", + "lightStatus": "0,100", + "vcnMode": true, + "deviceFwVersion": "1.2.3.4.5.6", + "barrierStatus": "1,100,0,0,-1,0,3,0" + }, + "timestamp": null, + "version": null + }, + "connectionState": { + "connected": true, + "updatedTimestamp": "123" + } + } +] diff --git a/tests/components/nice_go/snapshots/test_cover.ambr b/tests/components/nice_go/snapshots/test_cover.ambr new file mode 100644 index 00000000000..391d91584bf --- /dev/null +++ b/tests/components/nice_go/snapshots/test_cover.ambr @@ -0,0 +1,193 @@ +# serializer version: 1 +# name: test_covers[cover.test_garage_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '1', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- +# name: test_covers[cover.test_garage_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_2', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': '2', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 2', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'open', + }) +# --- +# name: test_covers[cover.test_garage_3-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_3', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'test3-GDO', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_3-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 3', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_3', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'opening', + }) +# --- +# name: test_covers[cover.test_garage_4-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'cover', + 'entity_category': None, + 'entity_id': 'cover.test_garage_4', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'test4-GDO', + 'unit_of_measurement': None, + }) +# --- +# name: test_covers[cover.test_garage_4-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 4', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_4', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closing', + }) +# --- diff --git a/tests/components/nice_go/snapshots/test_diagnostics.ambr b/tests/components/nice_go/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..6f9428ed246 --- /dev/null +++ b/tests/components/nice_go/snapshots/test_diagnostics.ambr @@ -0,0 +1,44 @@ +# serializer version: 1 +# name: test_entry_diagnostics + dict({ + 'coordinator_data': dict({ + '1': dict({ + 'barrier_status': 'closed', + 'connected': True, + 'fw_version': '1.2.3.4.5.6', + 'id': '1', + 'light_status': True, + 'name': 'Test Garage 1', + 'vacation_mode': False, + }), + '2': dict({ + 'barrier_status': 'open', + 'connected': True, + 'fw_version': '1.2.3.4.5.6', + 'id': '2', + 'light_status': 
False, + 'name': 'Test Garage 2', + 'vacation_mode': True, + }), + }), + 'entry': dict({ + 'data': dict({ + 'email': '**REDACTED**', + 'password': '**REDACTED**', + 'refresh_token': '**REDACTED**', + }), + 'disabled_by': None, + 'domain': 'nice_go', + 'entry_id': 'acefdd4b3a4a0911067d1cf51414201e', + 'minor_version': 1, + 'options': dict({ + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': '**REDACTED**', + 'unique_id': '**REDACTED**', + 'version': 1, + }), + }) +# --- diff --git a/tests/components/nice_go/snapshots/test_init.ambr b/tests/components/nice_go/snapshots/test_init.ambr new file mode 100644 index 00000000000..ff389568d1b --- /dev/null +++ b/tests/components/nice_go/snapshots/test_init.ambr @@ -0,0 +1,16 @@ +# serializer version: 1 +# name: test_on_data_none_parsed + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'garage', + 'friendly_name': 'Test Garage 1', + 'supported_features': , + }), + 'context': , + 'entity_id': 'cover.test_garage_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'closed', + }) +# --- diff --git a/tests/components/nice_go/snapshots/test_light.ambr b/tests/components/nice_go/snapshots/test_light.ambr new file mode 100644 index 00000000000..2e29d9589dd --- /dev/null +++ b/tests/components/nice_go/snapshots/test_light.ambr @@ -0,0 +1,223 @@ +# serializer version: 1 +# name: test_data[light.test_garage_1_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_1_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': '1', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_1_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': , + 'friendly_name': 'Test Garage 1 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_1_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_data[light.test_garage_2_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_2_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'nice_go', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': '2', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_2_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'color_mode': None, + 'friendly_name': 'Test Garage 2 Light', + 
'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_2_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_data[light.test_garage_3_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_3_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'test3-Light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_3_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': None, + 'color_mode': None, + 'friendly_name': 'Test Garage 3 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_3_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_data[light.test_garage_4_light-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'supported_color_modes': list([ + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'light', + 'entity_category': None, + 'entity_id': 'light.test_garage_4_light', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Light', + 'platform': 'linear_garage_door', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'light', + 'unique_id': 'test4-Light', + 'unit_of_measurement': None, + }) +# --- +# name: test_data[light.test_garage_4_light-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'brightness': 255, + 'color_mode': , + 'friendly_name': 'Test Garage 4 Light', + 'supported_color_modes': list([ + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'light.test_garage_4_light', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/nice_go/test_config_flow.py b/tests/components/nice_go/test_config_flow.py new file mode 100644 index 00000000000..67930b9f752 --- /dev/null +++ b/tests/components/nice_go/test_config_flow.py @@ -0,0 +1,111 @@ +"""Test the Nice G.O. 
config flow.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from nice_go import AuthFailedError +import pytest + +from homeassistant.components.nice_go.const import ( + CONF_REFRESH_TOKEN, + CONF_REFRESH_TOKEN_CREATION_TIME, + DOMAIN, +) +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + + +async def test_form( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_setup_entry: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test we get the form.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert not result["errors"] + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-email", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-email" + assert result["data"][CONF_EMAIL] == "test-email" + assert result["data"][CONF_PASSWORD] == "test-password" + assert result["data"][CONF_REFRESH_TOKEN] == "test-refresh-token" + assert CONF_REFRESH_TOKEN_CREATION_TIME in result["data"] + assert result["result"].unique_id == "test-email" + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("side_effect", "expected_error"), + [(AuthFailedError, "invalid_auth"), (Exception, "unknown")], +) +async def test_form_exceptions( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_setup_entry: AsyncMock, + side_effect: Exception, + expected_error: str, +) -> None: + """Test we handle invalid auth.""" + mock_nice_go.authenticate.side_effect = side_effect + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-email", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": expected_error} + mock_nice_go.authenticate.side_effect = None + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-email", + CONF_PASSWORD: "test-password", + }, + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + + +async def test_duplicate_device( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_nice_go: AsyncMock, +) -> None: + """Test that duplicate devices are handled.""" + mock_config_entry.add_to_hass(hass) + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_EMAIL: "test-email", + CONF_PASSWORD: "test-password", + }, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/nice_go/test_cover.py b/tests/components/nice_go/test_cover.py new file mode 100644 index 00000000000..a6eb9bd27fb --- /dev/null +++ b/tests/components/nice_go/test_cover.py @@ -0,0 +1,115 @@ +"""Test Nice G.O. 
cover.""" + +from unittest.mock import AsyncMock + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.cover import ( + DOMAIN as COVER_DOMAIN, + SERVICE_CLOSE_COVER, + SERVICE_OPEN_COVER, +) +from homeassistant.components.nice_go.const import DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + STATE_CLOSED, + STATE_CLOSING, + STATE_OPEN, + STATE_OPENING, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform + + +async def test_covers( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that data gets parsed and returned appropriately.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_open_cover( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test that opening the cover works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_2"}, + blocking=True, + ) + + assert mock_nice_go.open_barrier.call_count == 0 + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_OPEN_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_1"}, + blocking=True, + ) + + assert mock_nice_go.open_barrier.call_count == 1 + + +async def test_close_cover( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test that closing the cover works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_1"}, + blocking=True, + ) + + assert mock_nice_go.close_barrier.call_count == 0 + + await hass.services.async_call( + COVER_DOMAIN, + SERVICE_CLOSE_COVER, + {ATTR_ENTITY_ID: "cover.test_garage_2"}, + blocking=True, + ) + + assert mock_nice_go.close_barrier.call_count == 1 + + +async def test_update_cover_state( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that closing the cover works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert hass.states.get("cover.test_garage_1").state == STATE_CLOSED + assert hass.states.get("cover.test_garage_2").state == STATE_OPEN + + device_update = load_json_object_fixture("device_state_update.json", DOMAIN) + await mock_config_entry.runtime_data.on_data(device_update) + device_update_1 = load_json_object_fixture("device_state_update_1.json", DOMAIN) + await mock_config_entry.runtime_data.on_data(device_update_1) + + assert hass.states.get("cover.test_garage_1").state == STATE_OPENING + assert hass.states.get("cover.test_garage_2").state == STATE_CLOSING diff --git a/tests/components/nice_go/test_diagnostics.py b/tests/components/nice_go/test_diagnostics.py new file mode 100644 index 00000000000..f91f5748792 --- /dev/null +++ b/tests/components/nice_go/test_diagnostics.py @@ -0,0 +1,31 @@ +"""Test diagnostics of Nice G.O..""" + +from unittest.mock import 
AsyncMock + +from syrupy import SnapshotAssertion +from syrupy.filters import props + +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_entry_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test config entry diagnostics.""" + await setup_integration(hass, mock_config_entry, []) + result = await get_diagnostics_for_config_entry( + hass, hass_client, mock_config_entry + ) + assert result == snapshot( + exclude=props("created_at", "modified_at", "refresh_token_creation_time") + ) diff --git a/tests/components/nice_go/test_event.py b/tests/components/nice_go/test_event.py new file mode 100644 index 00000000000..0038b2882ad --- /dev/null +++ b/tests/components/nice_go/test_event.py @@ -0,0 +1,31 @@ +"""Nice G.O. event tests.""" + +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +@pytest.mark.freeze_time("2024-08-19") +async def test_barrier_obstructed( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test barrier obstructed.""" + mock_nice_go.event = MagicMock() + await setup_integration(hass, mock_config_entry, [Platform.EVENT]) + + await mock_nice_go.event.call_args_list[2][0][0]({"deviceId": "1"}) + await hass.async_block_till_done() + + event_state = hass.states.get("event.test_garage_1_barrier_obstructed") + + assert event_state.state == "2024-08-19T00:00:00.000+00:00" + assert event_state.attributes["event_type"] == "barrier_obstructed" diff --git a/tests/components/nice_go/test_init.py b/tests/components/nice_go/test_init.py new file mode 100644 index 00000000000..5568a7ea62a --- /dev/null +++ b/tests/components/nice_go/test_init.py @@ -0,0 +1,288 @@ +"""Test Nice G.O. init.""" + +from datetime import timedelta +from unittest.mock import AsyncMock, MagicMock + +from freezegun.api import FrozenDateTimeFactory +from nice_go import ApiError, AuthFailedError, Barrier, BarrierState +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.nice_go.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import issue_registry as ir + +from . 
import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + + +async def test_unload_entry( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test the unload entry.""" + + await setup_integration(hass, mock_config_entry, []) + assert mock_config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(mock_config_entry.entry_id) + await hass.async_block_till_done() + assert mock_config_entry.state is ConfigEntryState.NOT_LOADED + + +@pytest.mark.parametrize( + ("side_effect", "entry_state"), + [ + ( + AuthFailedError(), + ConfigEntryState.SETUP_ERROR, + ), + (ApiError(), ConfigEntryState.SETUP_RETRY), + ], +) +async def test_setup_failure( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + side_effect: Exception, + entry_state: ConfigEntryState, +) -> None: + """Test reauth trigger setup.""" + + mock_nice_go.authenticate_refresh.side_effect = side_effect + + await setup_integration(hass, mock_config_entry, []) + assert mock_config_entry.state is entry_state + + +async def test_firmware_update_required( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + issue_registry: ir.IssueRegistry, +) -> None: + """Test firmware update required.""" + + mock_nice_go.get_all_barriers.return_value = [ + Barrier( + id="test-device-id", + type="test-type", + controlLevel="test-control-level", + attr=[{"key": "test-attr", "value": "test-value"}], + state=BarrierState( + deviceId="test-device-id", + reported={ + "displayName": "test-display-name", + "migrationStatus": "NOT_STARTED", + }, + desired=None, + connectionState=None, + version=None, + timestamp=None, + ), + api=mock_nice_go, + ) + ] + + await setup_integration(hass, mock_config_entry, []) + + issue = issue_registry.async_get_issue( + DOMAIN, + "firmware_update_required_test-device-id", + ) + assert issue + + +async def test_update_refresh_token( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test updating refresh token.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_nice_go.authenticate.call_count == 0 + + mock_nice_go.authenticate.return_value = "new-refresh-token" + freezer.tick(timedelta(days=30, seconds=1)) + async_fire_time_changed(hass) + assert await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.authenticate.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 2 + assert mock_config_entry.data["refresh_token"] == "new-refresh-token" + + +async def test_update_refresh_token_api_error( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test updating refresh token with error.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_nice_go.authenticate.call_count == 0 + + mock_nice_go.authenticate.side_effect = ApiError + freezer.tick(timedelta(days=30)) + async_fire_time_changed(hass) + assert not 
await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.authenticate.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_config_entry.data["refresh_token"] == "test-refresh-token" + assert "API error" in caplog.text + + +async def test_update_refresh_token_auth_failed( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test updating refresh token with error.""" + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_nice_go.authenticate.call_count == 0 + + mock_nice_go.authenticate.side_effect = AuthFailedError + freezer.tick(timedelta(days=30)) + async_fire_time_changed(hass) + assert not await hass.config_entries.async_reload(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_nice_go.authenticate_refresh.call_count == 1 + assert mock_nice_go.authenticate.call_count == 1 + assert mock_nice_go.get_all_barriers.call_count == 1 + assert mock_config_entry.data["refresh_token"] == "test-refresh-token" + assert "Authentication failed" in caplog.text + + +async def test_client_listen_api_error( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + caplog: pytest.LogCaptureFixture, + freezer: FrozenDateTimeFactory, +) -> None: + """Test client listen with error.""" + + mock_nice_go.connect.side_effect = ApiError + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert "API error" in caplog.text + + mock_nice_go.connect.side_effect = None + + freezer.tick(timedelta(seconds=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert mock_nice_go.connect.call_count == 2 + + +async def test_on_data_none_parsed( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test on data with None parsed.""" + + mock_nice_go.event = MagicMock() + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + await mock_nice_go.event.call_args[0][0]( + { + "data": { + "devicesStatesUpdateFeed": { + "item": { + "deviceId": "1", + "desired": '{"key": "value"}', + "reported": '{"displayName":"test-display-name", "migrationStatus":"NOT_STARTED"}', + "connectionState": { + "connected": None, + "updatedTimestamp": None, + }, + "version": None, + "timestamp": None, + } + } + } + } + ) + + assert hass.states.get("cover.test_garage_1") == snapshot + + +async def test_on_connected( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test on connected.""" + + mock_nice_go.event = MagicMock() + + await setup_integration(hass, mock_config_entry, [Platform.COVER]) + + assert mock_nice_go.event.call_count == 2 + + mock_nice_go.subscribe = AsyncMock() + await mock_nice_go.event.call_args_list[0][0][0]() + + assert mock_nice_go.subscribe.call_count == 1 + + +async def test_no_connection_state( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test parsing barrier with no connection state.""" + + mock_nice_go.event = MagicMock() + + await setup_integration(hass, mock_config_entry, 
[Platform.COVER]) + + assert mock_nice_go.event.call_count == 2 + + await mock_nice_go.event.call_args[0][0]( + { + "data": { + "devicesStatesUpdateFeed": { + "item": { + "deviceId": "1", + "desired": '{"key": "value"}', + "reported": '{"displayName":"Test Garage 1", "migrationStatus":"DONE", "barrierStatus": "1,100,0", "deviceFwVersion": "1.0.0", "lightStatus": "1,100", "vcnMode": false}', + "connectionState": None, + "version": None, + "timestamp": None, + } + } + } + } + ) + + assert hass.states.get("cover.test_garage_1").state == "unavailable" diff --git a/tests/components/nice_go/test_light.py b/tests/components/nice_go/test_light.py new file mode 100644 index 00000000000..e1852581fe6 --- /dev/null +++ b/tests/components/nice_go/test_light.py @@ -0,0 +1,88 @@ +"""Test Nice G.O. light.""" + +from unittest.mock import AsyncMock + +from syrupy import SnapshotAssertion + +from homeassistant.components.light import ( + DOMAIN as LIGHT_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.components.nice_go.const import DOMAIN +from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, load_json_object_fixture, snapshot_platform + + +async def test_data( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that data gets parsed and returned appropriately.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_turn_on( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test that turning on the light works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "light.test_garage_2_light"}, + blocking=True, + ) + + assert mock_nice_go.light_on.call_count == 1 + + +async def test_turn_off( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test that turning off the light works as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + await hass.services.async_call( + LIGHT_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "light.test_garage_1_light"}, + blocking=True, + ) + + assert mock_nice_go.light_off.call_count == 1 + + +async def test_update_light_state( + hass: HomeAssistant, + mock_nice_go: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that light state updates are handled as intended.""" + + await setup_integration(hass, mock_config_entry, [Platform.LIGHT]) + + assert hass.states.get("light.test_garage_1_light").state == STATE_ON + assert hass.states.get("light.test_garage_2_light").state == STATE_OFF + + device_update = load_json_object_fixture("device_state_update.json", DOMAIN) + await mock_config_entry.runtime_data.on_data(device_update) + device_update_1 = load_json_object_fixture("device_state_update_1.json", DOMAIN) + await mock_config_entry.runtime_data.on_data(device_update_1) + + assert hass.states.get("light.test_garage_1_light").state == STATE_OFF + assert hass.states.get("light.test_garage_2_light").state == STATE_ON diff --git 
a/tests/components/nice_go/test_switch.py b/tests/components/nice_go/test_switch.py new file mode 100644 index 00000000000..f34cba495c9 --- /dev/null +++ b/tests/components/nice_go/test_switch.py @@ -0,0 +1,43 @@ +"""Nice G.O. switch tests.""" + +from unittest.mock import AsyncMock + +from homeassistant.components.switch import ( + DOMAIN as SWITCH_DOMAIN, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, +) +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant + +from . import setup_integration + +from tests.common import MockConfigEntry + + +async def test_turn_on( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test turn on switch.""" + await setup_integration(hass, mock_config_entry, [Platform.SWITCH]) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: "switch.test_garage_1_vacation_mode"}, + blocking=True, + ) + mock_nice_go.vacation_mode_on.assert_called_once_with("1") + + +async def test_turn_off( + hass: HomeAssistant, mock_nice_go: AsyncMock, mock_config_entry: MockConfigEntry +) -> None: + """Test turn off switch.""" + await setup_integration(hass, mock_config_entry, [Platform.SWITCH]) + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: "switch.test_garage_2_vacation_mode"}, + blocking=True, + ) + mock_nice_go.vacation_mode_off.assert_called_once_with("2") diff --git a/tests/components/notify/common.py b/tests/components/notify/common.py index 418de96d1aa..1b5c0d6d6ba 100644 --- a/tests/components/notify/common.py +++ b/tests/components/notify/common.py @@ -4,6 +4,8 @@ All containing methods are legacy helpers that should not be used by new components. Instead call the service directly. 
""" +from typing import Any + from homeassistant.components.notify import ( ATTR_DATA, ATTR_MESSAGE, @@ -11,11 +13,14 @@ from homeassistant.components.notify import ( DOMAIN, SERVICE_NOTIFY, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def send_message(hass, message, title=None, data=None): +def send_message( + hass: HomeAssistant, message: str, title: str | None = None, data: Any = None +) -> None: """Send a notification message.""" info = {ATTR_MESSAGE: message} diff --git a/tests/components/notify/test_legacy.py b/tests/components/notify/test_legacy.py index b499486b312..79a1b75dcae 100644 --- a/tests/components/notify/test_legacy.py +++ b/tests/components/notify/test_legacy.py @@ -1,7 +1,7 @@ """The tests for legacy notify services.""" import asyncio -from collections.abc import Mapping +from collections.abc import Callable, Coroutine, Mapping from pathlib import Path from typing import Any from unittest.mock import MagicMock, Mock, patch @@ -63,8 +63,16 @@ def mock_notify_platform( hass: HomeAssistant, tmp_path: Path, integration: str = "notify", - async_get_service: Any = None, - get_service: Any = None, + async_get_service: Callable[ + [HomeAssistant, ConfigType, DiscoveryInfoType | None], + Coroutine[Any, Any, notify.BaseNotificationService], + ] + | None = None, + get_service: Callable[ + [HomeAssistant, ConfigType, DiscoveryInfoType | None], + notify.BaseNotificationService, + ] + | None = None, ): """Specialize the mock platform for legacy notify service.""" loaded_platform = MockNotifyPlatform(async_get_service, get_service) @@ -263,9 +271,13 @@ async def test_platform_setup_with_error( ) -> None: """Test service setup with an invalid setup.""" - async def async_get_service(hass, config, discovery_info=None): + async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> notify.BaseNotificationService | None: """Return None for an invalid notify service.""" - raise Exception("Setup error") # pylint: disable=broad-exception-raised + raise Exception("Setup error") # noqa: TRY002 mock_notify_platform( hass, tmp_path, "testnotify", async_get_service=async_get_service @@ -283,11 +295,15 @@ async def test_platform_setup_with_error( async def test_reload_with_notify_builtin_platform_reload( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path + hass: HomeAssistant, tmp_path: Path ) -> None: """Test reload using the legacy notify platform reload method.""" - async def async_get_service(hass, config, discovery_info=None): + async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") @@ -314,19 +330,25 @@ async def test_reload_with_notify_builtin_platform_reload( assert hass.services.has_service(notify.DOMAIN, "testnotify_b") -async def test_setup_platform_and_reload( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path -) -> None: +async def test_setup_platform_and_reload(hass: HomeAssistant, tmp_path: Path) -> None: """Test service setup and reload.""" get_service_called = Mock() - async def async_get_service(hass, config, discovery_info=None): + async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: 
"""Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") - async def async_get_service2(hass, config, discovery_info=None): + async def async_get_service2( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get legacy notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"c": 3, "d": 4} @@ -405,18 +427,26 @@ async def test_setup_platform_and_reload( async def test_setup_platform_before_notify_setup( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path + hass: HomeAssistant, tmp_path: Path ) -> None: """Test trying to setup a platform before legacy notify service is setup.""" get_service_called = Mock() - async def async_get_service(hass, config, discovery_info=None): + async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") - async def async_get_service2(hass, config, discovery_info=None): + async def async_get_service2( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"c": 3, "d": 4} @@ -455,18 +485,26 @@ async def test_setup_platform_before_notify_setup( async def test_setup_platform_after_notify_setup( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, tmp_path: Path + hass: HomeAssistant, tmp_path: Path ) -> None: """Test trying to setup a platform after legacy notify service is set up.""" get_service_called = Mock() - async def async_get_service(hass, config, discovery_info=None): + async def async_get_service( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"a": 1, "b": 2} return NotificationService(hass, targetlist, "testnotify") - async def async_get_service2(hass, config, discovery_info=None): + async def async_get_service2( + hass: HomeAssistant, + config: ConfigType, + discovery_info: DiscoveryInfoType | None = None, + ) -> NotificationService: """Get notify service for mocked platform.""" get_service_called(config, discovery_info) targetlist = {"c": 3, "d": 4} diff --git a/tests/components/notion/test_config_flow.py b/tests/components/notion/test_config_flow.py index 2cc5e3f04b7..15c211c19cb 100644 --- a/tests/components/notion/test_config_flow.py +++ b/tests/components/notion/test_config_flow.py @@ -6,13 +6,15 @@ from aionotion.errors import InvalidCredentialsError, NotionError import pytest from homeassistant.components.notion import CONF_REFRESH_TOKEN, CONF_USER_UUID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_REFRESH_TOKEN, TEST_USER_UUID, TEST_USERNAME +from tests.common import MockConfigEntry + pytestmark = 
pytest.mark.usefixtures("mock_setup_entry") @@ -90,21 +92,13 @@ async def test_duplicate_error(hass: HomeAssistant, config, config_entry) -> Non async def test_reauth( hass: HomeAssistant, config, - config_entry, + config_entry: MockConfigEntry, errors, get_client_with_exception, mock_aionotion, ) -> None: """Test that re-auth works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data=config, - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" # Test errors that can arise when getting a Notion API client: diff --git a/tests/components/nsw_fuel_station/test_sensor.py b/tests/components/nsw_fuel_station/test_sensor.py index 898d5757870..dbf52d937f0 100644 --- a/tests/components/nsw_fuel_station/test_sensor.py +++ b/tests/components/nsw_fuel_station/test_sensor.py @@ -23,7 +23,9 @@ VALID_CONFIG_EXPECTED_ENTITY_IDS = ["my_fake_station_p95", "my_fake_station_e10" class MockPrice: """Mock Price implementation.""" - def __init__(self, price, fuel_type, last_updated, price_unit, station_code): + def __init__( + self, price, fuel_type, last_updated, price_unit, station_code + ) -> None: """Initialize a mock price instance.""" self.price = price self.fuel_type = fuel_type @@ -35,7 +37,7 @@ class MockPrice: class MockStation: """Mock Station implementation.""" - def __init__(self, name, code): + def __init__(self, name, code) -> None: """Initialize a mock Station instance.""" self.name = name self.code = code @@ -44,7 +46,7 @@ class MockStation: class MockGetFuelPricesResponse: """Mock GetFuelPricesResponse implementation.""" - def __init__(self, prices, stations): + def __init__(self, prices, stations) -> None: """Initialize a mock GetFuelPricesResponse instance.""" self.prices = prices self.stations = stations diff --git a/tests/components/nuki/test_config_flow.py b/tests/components/nuki/test_config_flow.py index cdd429c40c5..d4ddc261f1e 100644 --- a/tests/components/nuki/test_config_flow.py +++ b/tests/components/nuki/test_config_flow.py @@ -210,9 +210,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: """Test starting a reauthentication flow.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -241,9 +239,7 @@ async def test_reauth_invalid_auth(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with invalid auth.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -265,9 +261,7 @@ async def test_reauth_cannot_connect(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with cannot connect.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -289,9 +283,7 @@ async def 
test_reauth_unknown_exception(hass: HomeAssistant) -> None: """Test starting a reauthentication flow with an unknown exception.""" entry = await setup_nuki_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/numato/numato_mock.py b/tests/components/numato/numato_mock.py index 097a785beb1..208beffe83f 100644 --- a/tests/components/numato/numato_mock.py +++ b/tests/components/numato/numato_mock.py @@ -8,14 +8,14 @@ class NumatoModuleMock: NumatoGpioError = NumatoGpioError - def __init__(self): + def __init__(self) -> None: """Initialize the numato_gpio module mockup class.""" self.devices = {} class NumatoDeviceMock: """Mockup for the numato_gpio.NumatoUsbGpio class.""" - def __init__(self, device): + def __init__(self, device) -> None: """Initialize numato device mockup.""" self.device = device self.callbacks = {} diff --git a/tests/components/number/test_init.py b/tests/components/number/test_init.py index 55dad2506f1..721b531e8cd 100644 --- a/tests/components/number/test_init.py +++ b/tests/components/number/test_init.py @@ -121,7 +121,7 @@ class MockNumberEntityDescr(NumberEntity): Step is calculated based on the smaller max_value and min_value. """ - def __init__(self): + def __init__(self) -> None: """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -145,7 +145,7 @@ class MockNumberEntityAttrWithDescription(NumberEntity): members take precedence over the entity description. """ - def __init__(self): + def __init__(self) -> None: """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", @@ -223,7 +223,7 @@ class MockNumberEntityDescrDeprecated(NumberEntity): Step is calculated based on the smaller max_value and min_value. 
""" - def __init__(self): + def __init__(self) -> None: """Initialize the clas instance.""" self.entity_description = NumberEntityDescription( "test", diff --git a/tests/components/nx584/test_binary_sensor.py b/tests/components/nx584/test_binary_sensor.py index 9261521f850..d59cbdcf69d 100644 --- a/tests/components/nx584/test_binary_sensor.py +++ b/tests/components/nx584/test_binary_sensor.py @@ -1,5 +1,6 @@ """The tests for the nx584 sensor platform.""" +from typing import Any from unittest import mock from nx584 import client as nx584_client @@ -99,7 +100,9 @@ def test_nx584_sensor_setup_full_config( assert mock_watcher.called -async def _test_assert_graceful_fail(hass, config): +async def _test_assert_graceful_fail( + hass: HomeAssistant, config: dict[str, Any] +) -> None: """Test the failing.""" assert not await async_setup_component(hass, "nx584", config) @@ -114,7 +117,9 @@ async def _test_assert_graceful_fail(hass, config): ({"zone_types": {"notazone": "motion"}}), ], ) -async def test_nx584_sensor_setup_bad_config(hass: HomeAssistant, config) -> None: +async def test_nx584_sensor_setup_bad_config( + hass: HomeAssistant, config: dict[str, Any] +) -> None: """Test the setup with bad configuration.""" await _test_assert_graceful_fail(hass, config) diff --git a/tests/components/nzbget/conftest.py b/tests/components/nzbget/conftest.py index 8f48a4306c7..8a980d3ddb0 100644 --- a/tests/components/nzbget/conftest.py +++ b/tests/components/nzbget/conftest.py @@ -1,5 +1,6 @@ """Define fixtures available for all tests.""" +from collections.abc import Generator from unittest.mock import MagicMock, patch import pytest @@ -8,7 +9,7 @@ from . import MOCK_HISTORY, MOCK_STATUS, MOCK_VERSION @pytest.fixture -def nzbget_api(hass): +def nzbget_api() -> Generator[MagicMock]: """Mock NZBGetApi for easier testing.""" with patch("homeassistant.components.nzbget.coordinator.NZBGetAPI") as mock_api: instance = mock_api.return_value diff --git a/tests/components/nzbget/test_init.py b/tests/components/nzbget/test_init.py index a119bb953ce..baf0a37546d 100644 --- a/tests/components/nzbget/test_init.py +++ b/tests/components/nzbget/test_init.py @@ -3,6 +3,7 @@ from unittest.mock import patch from pynzbgetapi import NZBGetAPIException +import pytest from homeassistant.components.nzbget.const import DOMAIN from homeassistant.config_entries import ConfigEntryState @@ -13,7 +14,8 @@ from . import ENTRY_CONFIG, _patch_version, init_integration from tests.common import MockConfigEntry -async def test_unload_entry(hass: HomeAssistant, nzbget_api) -> None: +@pytest.mark.usefixtures("nzbget_api") +async def test_unload_entry(hass: HomeAssistant) -> None: """Test successful unload of entry.""" entry = await init_integration(hass) diff --git a/tests/components/nzbget/test_sensor.py b/tests/components/nzbget/test_sensor.py index 30a7f262b0b..38f7d8a68c3 100644 --- a/tests/components/nzbget/test_sensor.py +++ b/tests/components/nzbget/test_sensor.py @@ -3,6 +3,8 @@ from datetime import timedelta from unittest.mock import patch +import pytest + from homeassistant.components.sensor import SensorDeviceClass from homeassistant.const import ( ATTR_UNIT_OF_MEASUREMENT, @@ -16,9 +18,8 @@ from homeassistant.util import dt as dt_util from . 
import init_integration -async def test_sensors( - hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api -) -> None: +@pytest.mark.usefixtures("nzbget_api") +async def test_sensors(hass: HomeAssistant, entity_registry: er.EntityRegistry) -> None: """Test the creation and values of the sensors.""" now = dt_util.utcnow().replace(microsecond=0) with patch("homeassistant.components.nzbget.sensor.utcnow", return_value=now): diff --git a/tests/components/nzbget/test_switch.py b/tests/components/nzbget/test_switch.py index 1c518486b9f..afb88a7be82 100644 --- a/tests/components/nzbget/test_switch.py +++ b/tests/components/nzbget/test_switch.py @@ -1,5 +1,7 @@ """Test the NZBGet switches.""" +from unittest.mock import MagicMock + from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN from homeassistant.const import ( ATTR_ENTITY_ID, @@ -16,7 +18,7 @@ from . import init_integration async def test_download_switch( - hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api + hass: HomeAssistant, entity_registry: er.EntityRegistry, nzbget_api: MagicMock ) -> None: """Test the creation and values of the download switch.""" instance = nzbget_api.return_value @@ -44,7 +46,9 @@ async def test_download_switch( assert state.state == STATE_OFF -async def test_download_switch_services(hass: HomeAssistant, nzbget_api) -> None: +async def test_download_switch_services( + hass: HomeAssistant, nzbget_api: MagicMock +) -> None: """Test download switch services.""" instance = nzbget_api.return_value diff --git a/tests/components/octoprint/test_config_flow.py b/tests/components/octoprint/test_config_flow.py index 738fbea0887..e0696486718 100644 --- a/tests/components/octoprint/test_config_flow.py +++ b/tests/components/octoprint/test_config_flow.py @@ -580,15 +580,7 @@ async def test_reauth_form(hass: HomeAssistant) -> None: unique_id="1234", ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "entry_id": entry.entry_id, - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert not result["errors"] diff --git a/tests/components/ollama/conftest.py b/tests/components/ollama/conftest.py index b28b8850cd5..7658d1cbfab 100644 --- a/tests/components/ollama/conftest.py +++ b/tests/components/ollama/conftest.py @@ -1,5 +1,6 @@ """Tests Ollama integration.""" +from typing import Any from unittest.mock import patch import pytest @@ -16,12 +17,20 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: +def mock_config_entry_options() -> dict[str, Any]: + """Fixture for configuration entry options.""" + return TEST_OPTIONS + + +@pytest.fixture +def mock_config_entry( + hass: HomeAssistant, mock_config_entry_options: dict[str, Any] +) -> MockConfigEntry: """Mock a config entry.""" entry = MockConfigEntry( domain=ollama.DOMAIN, data=TEST_USER_DATA, - options=TEST_OPTIONS, + options=mock_config_entry_options, ) entry.add_to_hass(hass) return entry diff --git a/tests/components/ollama/test_config_flow.py b/tests/components/ollama/test_config_flow.py index b1b74197139..7755f2208b4 100644 --- a/tests/components/ollama/test_config_flow.py +++ b/tests/components/ollama/test_config_flow.py @@ -164,13 +164,18 @@ async def test_options( ) options = await hass.config_entries.options.async_configure( options_flow["flow_id"], - 
{ollama.CONF_PROMPT: "test prompt", ollama.CONF_MAX_HISTORY: 100}, + { + ollama.CONF_PROMPT: "test prompt", + ollama.CONF_MAX_HISTORY: 100, + ollama.CONF_NUM_CTX: 32768, + }, ) await hass.async_block_till_done() assert options["type"] is FlowResultType.CREATE_ENTRY assert options["data"] == { ollama.CONF_PROMPT: "test prompt", ollama.CONF_MAX_HISTORY: 100, + ollama.CONF_NUM_CTX: 32768, } diff --git a/tests/components/ollama/test_conversation.py b/tests/components/ollama/test_conversation.py index cb56b398342..f10805e747d 100644 --- a/tests/components/ollama/test_conversation.py +++ b/tests/components/ollama/test_conversation.py @@ -578,3 +578,34 @@ async def test_conversation_agent_with_assist( state.attributes[ATTR_SUPPORTED_FEATURES] == conversation.ConversationEntityFeature.CONTROL ) + + +@pytest.mark.parametrize( + ("mock_config_entry_options", "expected_options"), + [ + ({}, {"num_ctx": 8192}), + ({"num_ctx": 16384}, {"num_ctx": 16384}), + ], +) +async def test_options( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_init_component, + expected_options: dict[str, Any], +) -> None: + """Test that options are passed correctly to ollama client.""" + with patch( + "ollama.AsyncClient.chat", + return_value={"message": {"role": "assistant", "content": "test response"}}, + ) as mock_chat: + await conversation.async_converse( + hass, + "test message", + None, + Context(), + agent_id="conversation.mock_title", + ) + + assert mock_chat.call_count == 1 + args = mock_chat.call_args.kwargs + assert args.get("options") == expected_options diff --git a/tests/components/onvif/__init__.py b/tests/components/onvif/__init__.py index 0857dfef798..8a86538b977 100644 --- a/tests/components/onvif/__init__.py +++ b/tests/components/onvif/__init__.py @@ -151,7 +151,9 @@ def setup_mock_device(mock_device, capabilities=None): pullpoint_manager=MagicMock(state=PullPointManagerState.PAUSED), ) - def mock_constructor(hass, config): + def mock_constructor( + hass: HomeAssistant, config: config_entries.ConfigEntry + ) -> MagicMock: """Fake the controller constructor.""" return mock_device diff --git a/tests/components/onvif/test_config_flow.py b/tests/components/onvif/test_config_flow.py index c0e5a6fe545..f7200aa7a00 100644 --- a/tests/components/onvif/test_config_flow.py +++ b/tests/components/onvif/test_config_flow.py @@ -769,11 +769,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: """Test reauthenticate.""" entry, _, _ = await setup_onvif_integration(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert ( diff --git a/tests/components/openai_conversation/conftest.py b/tests/components/openai_conversation/conftest.py index 6d770b51ce9..4639d0dc8e0 100644 --- a/tests/components/openai_conversation/conftest.py +++ b/tests/components/openai_conversation/conftest.py @@ -13,7 +13,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass): +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a config entry.""" entry = MockConfigEntry( title="OpenAI", @@ -27,7 +27,9 @@ def mock_config_entry(hass): @pytest.fixture -def mock_config_entry_with_assist(hass, mock_config_entry): +def mock_config_entry_with_assist( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> 
MockConfigEntry: """Mock a config entry with assist.""" hass.config_entries.async_update_entry( mock_config_entry, options={CONF_LLM_HASS_API: llm.LLM_API_ASSIST} @@ -36,7 +38,9 @@ def mock_config_entry_with_assist(hass, mock_config_entry): @pytest.fixture -async def mock_init_component(hass, mock_config_entry): +async def mock_init_component( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: """Initialize integration.""" with patch( "openai.resources.models.AsyncModels.list", diff --git a/tests/components/openalpr_cloud/test_image_processing.py b/tests/components/openalpr_cloud/test_image_processing.py index 7115c3e7bf0..143513f9852 100644 --- a/tests/components/openalpr_cloud/test_image_processing.py +++ b/tests/components/openalpr_cloud/test_image_processing.py @@ -6,7 +6,7 @@ import pytest from homeassistant.components import camera, image_processing as ip from homeassistant.components.openalpr_cloud.image_processing import OPENALPR_API_URL -from homeassistant.core import HomeAssistant +from homeassistant.core import Event, HomeAssistant from homeassistant.setup import async_setup_component from tests.common import assert_setup_component, async_capture_events, load_fixture @@ -15,13 +15,13 @@ from tests.test_util.aiohttp import AiohttpClientMocker @pytest.fixture(autouse=True) -async def setup_homeassistant(hass: HomeAssistant): +async def setup_homeassistant(hass: HomeAssistant) -> None: """Set up the homeassistant integration.""" await async_setup_component(hass, "homeassistant", {}) @pytest.fixture -async def setup_openalpr_cloud(hass): +async def setup_openalpr_cloud(hass: HomeAssistant) -> None: """Set up openalpr cloud.""" config = { ip.DOMAIN: { @@ -43,7 +43,7 @@ async def setup_openalpr_cloud(hass): @pytest.fixture -async def alpr_events(hass): +async def alpr_events(hass: HomeAssistant) -> list[Event]: """Listen for events.""" return async_capture_events(hass, "image_processing.found_plate") diff --git a/tests/components/openexchangerates/test_config_flow.py b/tests/components/openexchangerates/test_config_flow.py index ec06c662201..0d4744c057a 100644 --- a/tests/components/openexchangerates/test_config_flow.py +++ b/tests/components/openexchangerates/test_config_flow.py @@ -200,16 +200,7 @@ async def test_reauth( ) -> None: """Test we can reauthenticate the config entry.""" mock_config_entry.add_to_hass(hass) - flow_context = { - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config_entry.entry_id, - "title_placeholders": {"name": mock_config_entry.title}, - "unique_id": mock_config_entry.unique_id, - } - - result = await hass.config_entries.flow.async_init( - DOMAIN, context=flow_context, data=mock_config_entry.data - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/opentherm_gw/test_config_flow.py b/tests/components/opentherm_gw/test_config_flow.py index 24b41df8124..e61a87bb55e 100644 --- a/tests/components/opentherm_gw/test_config_flow.py +++ b/tests/components/opentherm_gw/test_config_flow.py @@ -223,7 +223,7 @@ async def test_options_migration(hass: HomeAssistant) -> None: with ( patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.connect_and_subscribe", + "homeassistant.components.opentherm_gw.OpenThermGatewayHub.connect_and_subscribe", return_value=True, ), patch( diff --git a/tests/components/opentherm_gw/test_init.py b/tests/components/opentherm_gw/test_init.py index a1ff5b75f47..a466f788f1a 100644 
--- a/tests/components/opentherm_gw/test_init.py +++ b/tests/components/opentherm_gw/test_init.py @@ -40,7 +40,7 @@ async def test_device_registry_insert( with ( patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.cleanup", + "homeassistant.components.opentherm_gw.OpenThermGatewayHub.cleanup", return_value=None, ), patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS), @@ -72,7 +72,7 @@ async def test_device_registry_update( with ( patch( - "homeassistant.components.opentherm_gw.OpenThermGatewayDevice.cleanup", + "homeassistant.components.opentherm_gw.OpenThermGatewayHub.cleanup", return_value=None, ), patch("pyotgw.OpenThermGateway.connect", return_value=MINIMAL_STATUS_UPD), diff --git a/tests/components/openuv/conftest.py b/tests/components/openuv/conftest.py index cc344d25ccb..9bb1970bc2f 100644 --- a/tests/components/openuv/conftest.py +++ b/tests/components/openuv/conftest.py @@ -2,6 +2,7 @@ from collections.abc import Generator import json +from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest @@ -13,6 +14,7 @@ from homeassistant.const import ( CONF_LATITUDE, CONF_LONGITUDE, ) +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -41,7 +43,9 @@ def client_fixture(data_protection_window, data_uv_index): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -54,7 +58,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_API_KEY: TEST_API_KEY, @@ -89,7 +93,9 @@ async def mock_pyopenuv_fixture(client): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_pyopenuv): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_pyopenuv: None +) -> None: """Define a fixture to set up openuv.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/openuv/test_config_flow.py b/tests/components/openuv/test_config_flow.py index 3d31cf53250..182f66c887f 100644 --- a/tests/components/openuv/test_config_flow.py +++ b/tests/components/openuv/test_config_flow.py @@ -7,7 +7,7 @@ import pytest import voluptuous as vol from homeassistant.components.openuv import CONF_FROM_WINDOW, CONF_TO_WINDOW, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_API_KEY, CONF_ELEVATION, @@ -19,6 +19,8 @@ from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_API_KEY, TEST_ELEVATION, TEST_LATITUDE, TEST_LONGITUDE +from tests.common import MockConfigEntry + pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -105,12 +107,10 @@ async def test_options_flow( async def test_step_reauth( - hass: HomeAssistant, config, config_entry, setup_config_entry + hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=config - ) + result = await config_entry.start_reauth_flow(hass) assert 
result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/oralb/conftest.py b/tests/components/oralb/conftest.py index c757d79a78e..3e5f38ffb73 100644 --- a/tests/components/oralb/conftest.py +++ b/tests/components/oralb/conftest.py @@ -1,6 +1,7 @@ """OralB session fixtures.""" from collections.abc import Generator +from typing import Any from unittest import mock import pytest @@ -19,7 +20,7 @@ class MockBleakClient: services = MockServices() - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/osoenergy/test_config_flow.py b/tests/components/osoenergy/test_config_flow.py index d9db5888cc3..0b7a3c30cf2 100644 --- a/tests/components/osoenergy/test_config_flow.py +++ b/tests/components/osoenergy/test_config_flow.py @@ -65,15 +65,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: "homeassistant.components.osoenergy.config_flow.OSOEnergy.get_user_email", return_value=None, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config.unique_id, - "entry_id": mock_config.entry_id, - }, - data=mock_config.data, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {"base": "invalid_auth"} diff --git a/tests/components/otbr/__init__.py b/tests/components/otbr/__init__.py index 2c9daa127c2..7d52318b477 100644 --- a/tests/components/otbr/__init__.py +++ b/tests/components/otbr/__init__.py @@ -31,6 +31,7 @@ DATASET_INSECURE_PASSPHRASE = bytes.fromhex( TEST_BORDER_AGENT_EXTENDED_ADDRESS = bytes.fromhex("AEEB2F594B570BBF") TEST_BORDER_AGENT_ID = bytes.fromhex("230C6A1AC57F6F4BE262ACF32E5EF52C") +TEST_BORDER_AGENT_ID_2 = bytes.fromhex("230C6A1AC57F6F4BE262ACF32E5EF52D") ROUTER_DISCOVERY_HASS = { "type_": "_meshcop._udp.local.", diff --git a/tests/components/otbr/conftest.py b/tests/components/otbr/conftest.py index ba0f43c4a71..5ab3e442183 100644 --- a/tests/components/otbr/conftest.py +++ b/tests/components/otbr/conftest.py @@ -1,6 +1,8 @@ """Test fixtures for the Open Thread Border Router integration.""" -from unittest.mock import MagicMock, Mock, patch +from collections.abc import Generator +from typing import Any +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -18,58 +20,94 @@ from . 
import ( from tests.common import MockConfigEntry +@pytest.fixture(name="enable_compute_pskc") +def enable_compute_pskc_fixture() -> Any: + """Allow controlling if compute_pskc should be enabled.""" + return False + + +@pytest.fixture(name="compute_pskc", autouse=True) +def compute_pskc_fixture(enable_compute_pskc: bool) -> Any: + """Patch homeassistant.components.otbr.util.compute_pskc.""" + compute_pskc = otbr.util.compute_pskc if enable_compute_pskc else None + + with patch( + "homeassistant.components.otbr.util.compute_pskc", side_effect=compute_pskc + ) as compute_pskc_mock: + yield compute_pskc_mock + + +@pytest.fixture(name="dataset") +def dataset_fixture() -> Any: + """Return the discovery info from the supervisor.""" + return DATASET_CH16 + + +@pytest.fixture(name="get_active_dataset_tlvs") +def get_active_dataset_tlvs_fixture(dataset: Any) -> Generator[AsyncMock]: + """Mock get_active_dataset_tlvs.""" + with patch( + "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset + ) as get_active_dataset_tlvs: + yield get_active_dataset_tlvs + + +@pytest.fixture(name="get_border_agent_id") +def get_border_agent_id_fixture() -> Generator[AsyncMock]: + """Mock get_border_agent_id.""" + with patch( + "python_otbr_api.OTBR.get_border_agent_id", return_value=TEST_BORDER_AGENT_ID + ) as get_border_agent_id: + yield get_border_agent_id + + +@pytest.fixture(name="get_extended_address") +def get_extended_address_fixture() -> Generator[AsyncMock]: + """Mock get_extended_address.""" + with patch( + "python_otbr_api.OTBR.get_extended_address", + return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, + ) as get_extended_address: + yield get_extended_address + + @pytest.fixture(name="otbr_config_entry_multipan") -async def otbr_config_entry_multipan_fixture(hass): +async def otbr_config_entry_multipan_fixture( + hass: HomeAssistant, + get_active_dataset_tlvs: AsyncMock, + get_border_agent_id: AsyncMock, + get_extended_address: AsyncMock, +) -> str: """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, domain=otbr.DOMAIN, options={}, title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) - with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), - patch("homeassistant.components.otbr.util.compute_pskc"), - ): # Patch to speed up tests - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) + return config_entry.entry_id @pytest.fixture(name="otbr_config_entry_thread") -async def otbr_config_entry_thread_fixture(hass): +async def otbr_config_entry_thread_fixture( + hass: HomeAssistant, + get_active_dataset_tlvs: AsyncMock, + get_border_agent_id: AsyncMock, + get_extended_address: AsyncMock, +) -> None: """Mock Open Thread Border Router config entry.""" config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_THREAD, domain=otbr.DOMAIN, options={}, title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) - with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - 
return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), - patch("homeassistant.components.otbr.util.compute_pskc"), - ): # Patch to speed up tests - assert await hass.config_entries.async_setup(config_entry.entry_id) + assert await hass.config_entries.async_setup(config_entry.entry_id) @pytest.fixture(autouse=True) diff --git a/tests/components/otbr/test_config_flow.py b/tests/components/otbr/test_config_flow.py index 224f77931e5..edd92591b1b 100644 --- a/tests/components/otbr/test_config_flow.py +++ b/tests/components/otbr/test_config_flow.py @@ -13,7 +13,7 @@ from homeassistant.components import hassio, otbr from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -from . import DATASET_CH15, DATASET_CH16 +from . import DATASET_CH15, DATASET_CH16, TEST_BORDER_AGENT_ID, TEST_BORDER_AGENT_ID_2 from tests.common import MockConfigEntry, MockModule, mock_integration from tests.test_util.aiohttp import AiohttpClientMocker @@ -49,17 +49,104 @@ def addon_info_fixture(): yield addon_info +@pytest.mark.parametrize( + "url", + [ + "http://custom_url:1234", + "http://custom_url:1234/", + "http://custom_url:1234//", + ], +) +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_border_agent_id", +) async def test_user_flow( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, url: str ) -> None: """Test the user flow.""" - url = "http://custom_url:1234" - aioclient_mock.get(f"{url}/node/dataset/active", text="aa") + await _finish_user_flow(hass, url) + + +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_extended_address", +) +async def test_user_flow_additional_entry( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test more than a single entry is allowed.""" + url1 = "http://custom_url:1234" + url2 = "http://custom_url_2:1234" + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) + + mock_integration(hass, MockModule("hassio")) + + # Setup a config entry + config_entry = MockConfigEntry( + data={"url": url2}, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_ID_2.hex(), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Do a user flow + await _finish_user_flow(hass) + + +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_extended_address", +) +async def test_user_flow_additional_entry_fail_get_address( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test more than a single entry is allowed. + + This tests the behavior when we can't read the extended address from the existing + config entry. 
+ """ + url1 = "http://custom_url:1234" + url2 = "http://custom_url_2:1234" + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) + + mock_integration(hass, MockModule("hassio")) + + # Setup a config entry + config_entry = MockConfigEntry( + data={"url": url2}, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_ID_2.hex(), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Do a user flow + aioclient_mock.clear_requests() + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", status=HTTPStatus.NOT_FOUND) + await _finish_user_flow(hass) + assert f"Could not read border agent id from {url2}" in caplog.text + + +async def _finish_user_flow( + hass: HomeAssistant, url: str = "http://custom_url:1234" +) -> None: + """Finish a user flow.""" + stripped_url = "http://custom_url:1234" result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": "user"} ) - expected_data = {"url": url} + expected_data = {"url": stripped_url} assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -80,13 +167,56 @@ async def test_user_flow( assert result["options"] == {} assert len(mock_setup_entry.mock_calls) == 1 - config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] + config_entry = result["result"] assert config_entry.data == expected_data assert config_entry.options == {} assert config_entry.title == "Open Thread Border Router" - assert config_entry.unique_id == otbr.DOMAIN + assert config_entry.unique_id == TEST_BORDER_AGENT_ID.hex() +@pytest.mark.usefixtures( + "get_active_dataset_tlvs", + "get_border_agent_id", + "get_extended_address", +) +async def test_user_flow_additional_entry_same_address( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker +) -> None: + """Test more than a single entry is allowed.""" + mock_integration(hass, MockModule("hassio")) + + # Setup a config entry + config_entry = MockConfigEntry( + data={"url": "http://custom_url:1234"}, + domain=otbr.DOMAIN, + options={}, + title="Open Thread Border Router", + unique_id=TEST_BORDER_AGENT_ID.hex(), + ) + config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(config_entry.entry_id) + + # Start user flow + url = "http://custom_url:1234" + aioclient_mock.get(f"{url}/node/dataset/active", text="aa") + result = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "user"} + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + "url": url, + }, + ) + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "already_configured"} + + +@pytest.mark.usefixtures("get_border_agent_id") async def test_user_flow_router_not_setup( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: @@ -150,10 +280,11 @@ async def test_user_flow_router_not_setup( assert config_entry.data == expected_data assert config_entry.options == {} assert config_entry.title == "Open Thread Border Router" - assert config_entry.unique_id == otbr.DOMAIN + assert config_entry.unique_id == TEST_BORDER_AGENT_ID.hex() -async def test_user_flow_404( +@pytest.mark.usefixtures("get_border_agent_id") +async def test_user_flow_get_dataset_404( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: """Test the user flow.""" @@ -184,7 
+315,30 @@ async def test_user_flow_404( aiohttp.ClientError, ], ) -async def test_user_flow_connect_error(hass: HomeAssistant, error) -> None: +async def test_user_flow_get_ba_id_connect_error( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, error +) -> None: + """Test the user flow.""" + await _test_user_flow_connect_error(hass, "get_border_agent_id", error) + + +@pytest.mark.usefixtures("get_border_agent_id") +@pytest.mark.parametrize( + "error", + [ + TimeoutError, + python_otbr_api.OTBRError, + aiohttp.ClientError, + ], +) +async def test_user_flow_get_dataset_connect_error( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, error +) -> None: + """Test the user flow.""" + await _test_user_flow_connect_error(hass, "get_active_dataset_tlvs", error) + + +async def _test_user_flow_connect_error(hass: HomeAssistant, func, error) -> None: """Test the user flow.""" result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": "user"} @@ -193,7 +347,7 @@ async def test_user_flow_connect_error(hass: HomeAssistant, error) -> None: assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=error): + with patch(f"python_otbr_api.OTBR.{func}", side_effect=error): result = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -204,6 +358,7 @@ async def test_user_flow_connect_error(hass: HomeAssistant, error) -> None: assert result["errors"] == {"base": "cannot_connect"} +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -236,6 +391,7 @@ async def test_hassio_discovery_flow( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_yellow( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -293,6 +449,7 @@ async def test_hassio_discovery_flow_yellow( ), ], ) +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_sky_connect( device: str, title: str, @@ -338,6 +495,7 @@ async def test_hassio_discovery_flow_sky_connect( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_active_dataset_tlvs", "get_extended_address") async def test_hassio_discovery_flow_2x_addons( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -346,8 +504,10 @@ async def test_hassio_discovery_flow_2x_addons( url2 = "http://core-silabs-multiprotocol_2:8081" aioclient_mock.get(f"{url1}/node/dataset/active", text="aa") aioclient_mock.get(f"{url2}/node/dataset/active", text="bb") + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID_2.hex()) - async def _addon_info(hass, slug): + async def _addon_info(hass: HomeAssistant, slug: str) -> dict[str, Any]: await asyncio.sleep(0) if slug == "otbr": return { @@ -379,18 +539,107 @@ async def test_hassio_discovery_flow_2x_addons( addon_info.side_effect = _addon_info - with patch( - "homeassistant.components.otbr.async_setup_entry", - return_value=True, - ) as mock_setup_entry: - result1 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA - ) - result2 = await hass.config_entries.flow.async_init( - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 - ) + result1 = await 
hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA + ) + result2 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 + ) - results = [result1, result2] + results = [result1, result2] + + expected_data = { + "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", + } + expected_data_2 = { + "url": f"http://{HASSIO_DATA_2.config['host']}:{HASSIO_DATA_2.config['port']}", + } + + assert results[0]["type"] is FlowResultType.CREATE_ENTRY + assert ( + results[0]["title"] == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert results[0]["data"] == expected_data + assert results[0]["options"] == {} + + assert results[1]["type"] is FlowResultType.CREATE_ENTRY + assert ( + results[1]["title"] == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert results[1]["data"] == expected_data_2 + assert results[1]["options"] == {} + + assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 + + config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] + assert config_entry.data == expected_data + assert config_entry.options == {} + assert ( + config_entry.title == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert config_entry.unique_id == HASSIO_DATA.uuid + + config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[1] + assert config_entry.data == expected_data_2 + assert config_entry.options == {} + assert ( + config_entry.title == "Home Assistant SkyConnect (Silicon Labs Multiprotocol)" + ) + assert config_entry.unique_id == HASSIO_DATA_2.uuid + + +@pytest.mark.usefixtures("get_active_dataset_tlvs", "get_extended_address") +async def test_hassio_discovery_flow_2x_addons_same_ext_address( + hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info +) -> None: + """Test the hassio discovery flow when the user has 2 addons with otbr support.""" + url1 = "http://core-silabs-multiprotocol:8081" + url2 = "http://core-silabs-multiprotocol_2:8081" + aioclient_mock.get(f"{url1}/node/dataset/active", text="aa") + aioclient_mock.get(f"{url2}/node/dataset/active", text="bb") + aioclient_mock.get(f"{url1}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + aioclient_mock.get(f"{url2}/node/ba-id", json=TEST_BORDER_AGENT_ID.hex()) + + async def _addon_info(hass: HomeAssistant, slug: str) -> dict[str, Any]: + await asyncio.sleep(0) + if slug == "otbr": + return { + "available": True, + "hostname": None, + "options": { + "device": ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port0" + ) + }, + "state": None, + "update_available": False, + "version": None, + } + return { + "available": True, + "hostname": None, + "options": { + "device": ( + "/dev/serial/by-id/usb-Nabu_Casa_SkyConnect_v1.0_" + "9e2adbd75b8beb119fe564a0f320645d-if00-port1" + ) + }, + "state": None, + "update_available": False, + "version": None, + } + + addon_info.side_effect = _addon_info + + result1 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA + ) + result2 = await hass.config_entries.flow.async_init( + otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA_2 + ) + + results = [result1, result2] expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -403,9 +652,8 @@ async def test_hassio_discovery_flow_2x_addons( assert results[0]["data"] == expected_data assert results[0]["options"] == {} assert 
results[1]["type"] is FlowResultType.ABORT - assert results[1]["reason"] == "single_instance_allowed" + assert results[1]["reason"] == "already_configured" assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 1 - assert len(mock_setup_entry.mock_calls) == 1 config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] assert config_entry.data == expected_data @@ -416,6 +664,7 @@ async def test_hassio_discovery_flow_2x_addons( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -473,6 +722,7 @@ async def test_hassio_discovery_flow_router_not_setup( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup_has_preferred( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, addon_info ) -> None: @@ -525,6 +775,7 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_router_not_setup_has_preferred_2( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, @@ -588,6 +839,7 @@ async def test_hassio_discovery_flow_router_not_setup_has_preferred_2( assert config_entry.unique_id == HASSIO_DATA.uuid +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_404( hass: HomeAssistant, aioclient_mock: AiohttpClientMocker ) -> None: @@ -602,6 +854,7 @@ async def test_hassio_discovery_flow_404( assert result["reason"] == "unknown" +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_new_port_missing_unique_id( hass: HomeAssistant, ) -> None: @@ -625,7 +878,7 @@ async def test_hassio_discovery_flow_new_port_missing_unique_id( ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + assert result["reason"] == "already_configured" expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -634,6 +887,7 @@ async def test_hassio_discovery_flow_new_port_missing_unique_id( assert config_entry.data == expected_data +@pytest.mark.usefixtures("get_border_agent_id") async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: """Test the port can be updated.""" mock_integration(hass, MockModule("hassio")) @@ -656,7 +910,7 @@ async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: ) assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + assert result["reason"] == "already_configured" expected_data = { "url": f"http://{HASSIO_DATA.config['host']}:{HASSIO_DATA.config['port']}", @@ -665,6 +919,12 @@ async def test_hassio_discovery_flow_new_port(hass: HomeAssistant) -> None: assert config_entry.data == expected_data +@pytest.mark.usefixtures( + "addon_info", + "get_active_dataset_tlvs", + "get_border_agent_id", + "get_extended_address", +) async def test_hassio_discovery_flow_new_port_other_addon(hass: HomeAssistant) -> None: """Test the port is not updated if we get data for another addon hosting OTBR.""" mock_integration(hass, MockModule("hassio")) @@ -683,22 +943,34 @@ async def test_hassio_discovery_flow_new_port_other_addon(hass: HomeAssistant) - otbr.DOMAIN, context={"source": "hassio"}, data=HASSIO_DATA ) - assert result["type"] is 
FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" + # Another entry will be created + assert result["type"] is FlowResultType.CREATE_ENTRY - # Make sure the data was not updated + # Make sure the data of the existing entry was not updated expected_data = { "url": f"http://openthread_border_router:{HASSIO_DATA.config['port']+1}", } - config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] + config_entry = hass.config_entries.async_get_entry(config_entry.entry_id) assert config_entry.data == expected_data -@pytest.mark.parametrize(("source", "data"), [("hassio", HASSIO_DATA), ("user", None)]) -async def test_config_flow_single_entry( - hass: HomeAssistant, source: str, data: Any +@pytest.mark.parametrize( + ("source", "data", "expected_result"), + [ + ("hassio", HASSIO_DATA, FlowResultType.CREATE_ENTRY), + ("user", None, FlowResultType.FORM), + ], +) +@pytest.mark.usefixtures( + "addon_info", + "get_active_dataset_tlvs", + "get_border_agent_id", + "get_extended_address", +) +async def test_config_flow_additional_entry( + hass: HomeAssistant, source: str, data: Any, expected_result: FlowResultType ) -> None: - """Test only a single entry is allowed.""" + """Test more than a single entry is allowed.""" mock_integration(hass, MockModule("hassio")) # Setup the config entry @@ -711,13 +983,11 @@ async def test_config_flow_single_entry( config_entry.add_to_hass(hass) with patch( - "homeassistant.components.homeassistant_yellow.async_setup_entry", + "homeassistant.components.otbr.async_setup_entry", return_value=True, - ) as mock_setup_entry: + ): result = await hass.config_entries.flow.async_init( otbr.DOMAIN, context={"source": source}, data=data ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "single_instance_allowed" - mock_setup_entry.assert_not_called() + assert result["type"] is expected_result diff --git a/tests/components/otbr/test_init.py b/tests/components/otbr/test_init.py index 0c56e9ac8da..ca1cbd6483b 100644 --- a/tests/components/otbr/test_init.py +++ b/tests/components/otbr/test_init.py @@ -1,7 +1,6 @@ """Test the Open Thread Border Router integration.""" import asyncio -from http import HTTPStatus from typing import Any from unittest.mock import ANY, AsyncMock, MagicMock, patch @@ -12,15 +11,14 @@ from zeroconf.asyncio import AsyncServiceInfo from homeassistant.components import otbr, thread from homeassistant.components.thread import discovery +from homeassistant.config_entries import SOURCE_HASSIO, SOURCE_USER from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from . 
import ( BASE_URL, CONFIG_ENTRY_DATA_MULTIPAN, - CONFIG_ENTRY_DATA_THREAD, DATASET_CH15, DATASET_CH16, DATASET_INSECURE_NW_KEY, @@ -40,6 +38,15 @@ DATASET_NO_CHANNEL = bytes.fromhex( ) +@pytest.fixture(name="enable_mocks", autouse=True) +def enable_mocks_fixture( + get_active_dataset_tlvs: AsyncMock, + get_border_agent_id: AsyncMock, + get_extended_address: AsyncMock, +) -> None: + """Enable API mocks.""" + + async def test_import_dataset( hass: HomeAssistant, mock_async_zeroconf: MagicMock, @@ -64,21 +71,11 @@ async def test_import_dataset( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "homeassistant.components.thread.dataset_store.BORDER_AGENT_DISCOVERY_TIMEOUT", 0.1, @@ -142,20 +139,10 @@ async def test_import_share_radio_channel_collision( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -192,18 +179,10 @@ async def test_import_share_radio_no_channel_collision( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( - patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -222,6 +201,7 @@ async def test_import_share_radio_no_channel_collision( ) +@pytest.mark.parametrize("enable_compute_pskc", [True]) @pytest.mark.parametrize( "dataset", [DATASET_INSECURE_NW_KEY, DATASET_INSECURE_PASSPHRASE] ) @@ -237,18 +217,10 @@ async def test_import_insecure_dataset( domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) with ( - patch("python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=dataset), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - return_value=TEST_BORDER_AGENT_ID, - ), - patch( - "python_otbr_api.OTBR.get_extended_address", - return_value=TEST_BORDER_AGENT_EXTENDED_ADDRESS, - ), patch( "homeassistant.components.thread.dataset_store.DatasetStore.async_add" ) as mock_add, @@ -274,7 +246,9 @@ async def test_import_insecure_dataset( aiohttp.ClientError, ], ) -async def test_config_entry_not_ready(hass: HomeAssistant, error) -> None: +async def test_config_entry_not_ready( + hass: HomeAssistant, get_active_dataset_tlvs: AsyncMock, error +) -> None: """Test raising ConfigEntryNotReady .""" config_entry = MockConfigEntry( @@ -282,13 +256,16 @@ async def test_config_entry_not_ready(hass: HomeAssistant, error) -> 
None: domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) - with patch("python_otbr_api.OTBR.get_active_dataset_tlvs", side_effect=error): - assert not await hass.config_entries.async_setup(config_entry.entry_id) + get_active_dataset_tlvs.side_effect = error + assert not await hass.config_entries.async_setup(config_entry.entry_id) -async def test_border_agent_id_not_supported(hass: HomeAssistant) -> None: +async def test_border_agent_id_not_supported( + hass: HomeAssistant, get_border_agent_id: AsyncMock +) -> None: """Test border router does not support border agent ID.""" config_entry = MockConfigEntry( @@ -296,18 +273,11 @@ async def test_border_agent_id_not_supported(hass: HomeAssistant) -> None: domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) - with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch( - "python_otbr_api.OTBR.get_border_agent_id", - side_effect=python_otbr_api.GetBorderAgentIdNotSupportedError, - ), - ): - assert not await hass.config_entries.async_setup(config_entry.entry_id) + get_border_agent_id.side_effect = python_otbr_api.GetBorderAgentIdNotSupportedError + assert not await hass.config_entries.async_setup(config_entry.entry_id) async def test_config_entry_update(hass: HomeAssistant) -> None: @@ -317,6 +287,7 @@ async def test_config_entry_update(hass: HomeAssistant) -> None: domain=otbr.DOMAIN, options={}, title="My OTBR", + unique_id=TEST_BORDER_AGENT_EXTENDED_ADDRESS.hex(), ) config_entry.add_to_hass(hass) mock_api = MagicMock() @@ -346,104 +317,37 @@ async def test_remove_entry( aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="0E") - assert await otbr.async_get_active_dataset_tlvs(hass) == bytes.fromhex("0E") - config_entry = hass.config_entries.async_entries(otbr.DOMAIN)[0] await hass.config_entries.async_remove(config_entry.entry_id) - with pytest.raises(HomeAssistantError): - assert await otbr.async_get_active_dataset_tlvs(hass) - -async def test_get_active_dataset_tlvs( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan +@pytest.mark.parametrize( + ("source", "unique_id", "updated_unique_id"), + [ + (SOURCE_HASSIO, None, None), + (SOURCE_HASSIO, "abcd", "abcd"), + (SOURCE_USER, None, TEST_BORDER_AGENT_ID.hex()), + (SOURCE_USER, "abcd", TEST_BORDER_AGENT_ID.hex()), + ], +) +async def test_update_unique_id( + hass: HomeAssistant, + aioclient_mock: AiohttpClientMocker, + source: str, + unique_id: str | None, + updated_unique_id: str | None, ) -> None: - """Test async_get_active_dataset_tlvs.""" + """Test we update the unique id if extended address has changed.""" - mock_response = ( - "0E080000000000010000000300001035060004001FFFE00208F642646DA209B1C00708FDF57B5A" - "0FE2AAF60510DE98B5BA1A528FEE049D4B4B01835375030D4F70656E5468726561642048410102" - "25A40410F5DD18371BFD29E1A601EF6FFAD94C030C0402A0F7F8" - ) - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text=mock_response) - - assert await otbr.async_get_active_dataset_tlvs(hass) == bytes.fromhex( - mock_response - ) - - -async def test_get_active_dataset_tlvs_empty( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan -) -> None: - """Test async_get_active_dataset_tlvs.""" - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.NO_CONTENT) - assert await 
otbr.async_get_active_dataset_tlvs(hass) is None - - -async def test_get_active_dataset_tlvs_addon_not_installed(hass: HomeAssistant) -> None: - """Test async_get_active_dataset_tlvs when the multi-PAN addon is not installed.""" - - with pytest.raises(HomeAssistantError): - await otbr.async_get_active_dataset_tlvs(hass) - - -async def test_get_active_dataset_tlvs_404( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan -) -> None: - """Test async_get_active_dataset_tlvs with error.""" - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.NOT_FOUND) - with pytest.raises(HomeAssistantError): - await otbr.async_get_active_dataset_tlvs(hass) - - -async def test_get_active_dataset_tlvs_201( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan -) -> None: - """Test async_get_active_dataset_tlvs with error.""" - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", status=HTTPStatus.CREATED) - with pytest.raises(HomeAssistantError): - assert await otbr.async_get_active_dataset_tlvs(hass) - - -async def test_get_active_dataset_tlvs_invalid( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker, otbr_config_entry_multipan -) -> None: - """Test async_get_active_dataset_tlvs with error.""" - - aioclient_mock.get(f"{BASE_URL}/node/dataset/active", text="unexpected") - with pytest.raises(HomeAssistantError): - assert await otbr.async_get_active_dataset_tlvs(hass) - - -async def test_remove_extra_entries( - hass: HomeAssistant, aioclient_mock: AiohttpClientMocker -) -> None: - """Test we remove additional config entries.""" - - config_entry1 = MockConfigEntry( + config_entry = MockConfigEntry( data=CONFIG_ENTRY_DATA_MULTIPAN, domain=otbr.DOMAIN, options={}, + source=source, title="Open Thread Border Router", + unique_id=unique_id, ) - config_entry2 = MockConfigEntry( - data=CONFIG_ENTRY_DATA_THREAD, - domain=otbr.DOMAIN, - options={}, - title="Open Thread Border Router", - ) - config_entry1.add_to_hass(hass) - config_entry2.add_to_hass(hass) - assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 2 - with ( - patch( - "python_otbr_api.OTBR.get_active_dataset_tlvs", return_value=DATASET_CH16 - ), - patch("homeassistant.components.otbr.util.compute_pskc"), - ): # Patch to speed up tests - assert await async_setup_component(hass, otbr.DOMAIN, {}) - assert len(hass.config_entries.async_entries(otbr.DOMAIN)) == 1 + config_entry.add_to_hass(hass) + assert await async_setup_component(hass, otbr.DOMAIN, {}) + config_entry = hass.config_entries.async_get_entry(config_entry.entry_id) + assert config_entry.unique_id == updated_unique_id diff --git a/tests/components/otbr/test_silabs_multiprotocol.py b/tests/components/otbr/test_silabs_multiprotocol.py index 8d7bed13df6..01b1ab63f56 100644 --- a/tests/components/otbr/test_silabs_multiprotocol.py +++ b/tests/components/otbr/test_silabs_multiprotocol.py @@ -5,7 +5,6 @@ from unittest.mock import patch import pytest from python_otbr_api import ActiveDataSet, tlv_parser -from homeassistant.components import otbr from homeassistant.components.otbr import ( silabs_multiprotocol as otbr_silabs_multiprotocol, ) @@ -35,7 +34,7 @@ DATASET_CH16_PENDING = ( async def test_async_change_channel( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: - """Test test_async_change_channel.""" + """Test async_change_channel.""" store = await dataset_store.async_get_store(hass) assert len(store.datasets) == 1 @@ -63,7 +62,7 @@ async def test_async_change_channel( async def 
test_async_change_channel_no_pending( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: - """Test test_async_change_channel when the pending dataset already expired.""" + """Test async_change_channel when the pending dataset already expired.""" store = await dataset_store.async_get_store(hass) assert len(store.datasets) == 1 @@ -95,7 +94,7 @@ async def test_async_change_channel_no_pending( async def test_async_change_channel_no_update( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: - """Test test_async_change_channel when we didn't get a dataset from the OTBR.""" + """Test async_change_channel when we didn't get a dataset from the OTBR.""" store = await dataset_store.async_get_store(hass) assert len(store.datasets) == 1 @@ -126,6 +125,17 @@ async def test_async_change_channel_no_otbr(hass: HomeAssistant) -> None: mock_set_channel.assert_not_awaited() +async def test_async_change_channel_non_matching_url( + hass: HomeAssistant, otbr_config_entry_multipan: str +) -> None: + """Test async_change_channel when the config entry URL does not match.""" + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL + with patch("python_otbr_api.OTBR.set_channel") as mock_set_channel: + await otbr_silabs_multiprotocol.async_change_channel(hass, 16, delay=0) + mock_set_channel.assert_not_awaited() + + async def test_async_get_channel( hass: HomeAssistant, otbr_config_entry_multipan ) -> None: @@ -169,7 +179,18 @@ async def test_async_get_channel_no_otbr(hass: HomeAssistant) -> None: """Test test_async_get_channel when otbr is not configured.""" with patch("python_otbr_api.OTBR.get_active_dataset") as mock_get_active_dataset: - await otbr_silabs_multiprotocol.async_get_channel(hass) + assert await otbr_silabs_multiprotocol.async_get_channel(hass) is None + mock_get_active_dataset.assert_not_awaited() + + +async def test_async_get_channel_non_matching_url( + hass: HomeAssistant, otbr_config_entry_multipan: str +) -> None: + """Test async_get_channel when the config entry URL does not match.""" + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL + with patch("python_otbr_api.OTBR.get_active_dataset") as mock_get_active_dataset: + assert await otbr_silabs_multiprotocol.async_get_channel(hass) is None mock_get_active_dataset.assert_not_awaited() @@ -178,11 +199,11 @@ async def test_async_get_channel_no_otbr(hass: HomeAssistant) -> None: [(OTBR_MULTIPAN_URL, True), (OTBR_NON_MULTIPAN_URL, False)], ) async def test_async_using_multipan( - hass: HomeAssistant, otbr_config_entry_multipan, url: str, expected: bool + hass: HomeAssistant, otbr_config_entry_multipan: str, url: str, expected: bool ) -> None: """Test async_change_channel when otbr is not configured.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - data.url = url + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = url assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is expected @@ -191,3 +212,12 @@ async def test_async_using_multipan_no_otbr(hass: HomeAssistant) -> None: """Test async_change_channel when otbr is not configured.""" assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is False + + +async def test_async_using_multipan_non_matching_url( + hass: HomeAssistant, otbr_config_entry_multipan: str +) -> None: + """Test async_using_multipan when the config entry URL does not match.""" + config_entry = 
hass.config_entries.async_get_entry(otbr_config_entry_multipan) + config_entry.runtime_data.url = OTBR_NON_MULTIPAN_URL + assert await otbr_silabs_multiprotocol.async_using_multipan(hass) is False diff --git a/tests/components/otbr/test_util.py b/tests/components/otbr/test_util.py index 3b1edcfeb5b..0ed3041bea8 100644 --- a/tests/components/otbr/test_util.py +++ b/tests/components/otbr/test_util.py @@ -1,6 +1,6 @@ """Test OTBR Utility functions.""" -from unittest.mock import patch +from unittest.mock import AsyncMock, patch import pytest import python_otbr_api @@ -31,28 +31,37 @@ async def test_get_allowed_channel( assert await otbr.util.get_allowed_channel(hass, OTBR_NON_MULTIPAN_URL) is None -async def test_factory_reset(hass: HomeAssistant, otbr_config_entry_multipan) -> None: +async def test_factory_reset( + hass: HomeAssistant, + otbr_config_entry_multipan: str, + get_border_agent_id: AsyncMock, +) -> None: """Test factory_reset.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - + new_ba_id = b"new_ba_id" + get_border_agent_id.return_value = new_ba_id + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + assert config_entry.unique_id != new_ba_id.hex() with ( patch("python_otbr_api.OTBR.factory_reset") as factory_reset_mock, patch( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await data.factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() + # Check the unique_id is updated + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) + assert config_entry.unique_id == new_ba_id.hex() + async def test_factory_reset_not_supported( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test factory_reset.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -62,18 +71,17 @@ async def test_factory_reset_not_supported( "python_otbr_api.OTBR.delete_active_dataset" ) as delete_active_dataset_mock, ): - await data.factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() async def test_factory_reset_error_1( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test factory_reset.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -86,18 +94,17 @@ async def test_factory_reset_error_1( HomeAssistantError, ), ): - await data.factory_reset() + await config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_not_called() factory_reset_mock.assert_called_once_with() async def test_factory_reset_error_2( - hass: HomeAssistant, otbr_config_entry_multipan + hass: HomeAssistant, otbr_config_entry_multipan: str ) -> None: """Test factory_reset.""" - data: otbr.OTBRData = hass.data[otbr.DOMAIN] - + config_entry = hass.config_entries.async_get_entry(otbr_config_entry_multipan) with ( patch( "python_otbr_api.OTBR.factory_reset", @@ -111,7 +118,7 @@ async def test_factory_reset_error_2( HomeAssistantError, ), ): - await data.factory_reset() + await 
config_entry.runtime_data.factory_reset(hass) delete_active_dataset_mock.assert_called_once_with() factory_reset_mock.assert_called_once_with() diff --git a/tests/components/overkiz/test_config_flow.py b/tests/components/overkiz/test_config_flow.py index 50870ae85fe..cef5ef350a9 100644 --- a/tests/components/overkiz/test_config_flow.py +++ b/tests/components/overkiz/test_config_flow.py @@ -573,15 +573,7 @@ async def test_cloud_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "cloud" @@ -623,15 +615,7 @@ async def test_cloud_reauth_wrong_account(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "cloud" @@ -672,15 +656,7 @@ async def test_local_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "local_or_cloud" @@ -731,15 +707,7 @@ async def test_local_reauth_wrong_account(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "local_or_cloud" diff --git a/tests/components/ovo_energy/test_config_flow.py b/tests/components/ovo_energy/test_config_flow.py index 00899e745b9..c3f77ca5007 100644 --- a/tests/components/ovo_energy/test_config_flow.py +++ b/tests/components/ovo_energy/test_config_flow.py @@ -121,15 +121,15 @@ async def test_full_flow_implementation(hass: HomeAssistant) -> None: async def test_reauth_authorization_error(hass: HomeAssistant) -> None: """Test we show user form on authorization error.""" + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + ) + mock_config.add_to_hass(hass) with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" @@ -147,15 +147,15 @@ async def test_reauth_authorization_error(hass: HomeAssistant) -> None: async def test_reauth_connection_error(hass: HomeAssistant) -> None: """Test we show user form on connection error.""" + mock_config = 
MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + ) + mock_config.add_to_hass(hass) with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", side_effect=aiohttp.ClientError, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" @@ -173,20 +173,15 @@ async def test_reauth_connection_error(hass: HomeAssistant) -> None: async def test_reauth_flow(hass: HomeAssistant) -> None: """Test reauth works.""" + mock_config = MockConfigEntry( + domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT + ) + mock_config.add_to_hass(hass) with patch( "homeassistant.components.ovo_energy.config_flow.OVOEnergy.authenticate", return_value=False, ): - mock_config = MockConfigEntry( - domain=DOMAIN, unique_id=UNIQUE_ID, data=FIXTURE_USER_INPUT - ) - mock_config.add_to_hass(hass) - - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=FIXTURE_USER_INPUT, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth" diff --git a/tests/components/owntracks/test_config_flow.py b/tests/components/owntracks/test_config_flow.py index 818524c1c50..b1172eb4a31 100644 --- a/tests/components/owntracks/test_config_flow.py +++ b/tests/components/owntracks/test_config_flow.py @@ -51,7 +51,7 @@ def mock_not_supports_encryption(): yield -async def init_config_flow(hass): +async def init_config_flow(hass: HomeAssistant) -> config_flow.OwnTracksFlow: """Init a configuration flow.""" await async_process_ha_core_config( hass, diff --git a/tests/components/owntracks/test_device_tracker.py b/tests/components/owntracks/test_device_tracker.py index 0648a94c70b..2f35139c021 100644 --- a/tests/components/owntracks/test_device_tracker.py +++ b/tests/components/owntracks/test_device_tracker.py @@ -1,8 +1,10 @@ """The tests for the Owntracks device tracker.""" import base64 +from collections.abc import Callable, Generator import json import pickle +from typing import Any from unittest.mock import patch from nacl.encoding import Base64Encoder @@ -18,6 +20,8 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, async_fire_mqtt_message from tests.typing import ClientSessionGenerator, MqttMockHAClient +type OwnTracksContextFactory = Callable[[], owntracks.OwnTracksContext] + USER = "greg" DEVICE = "phone" @@ -291,7 +295,7 @@ def setup_comp( hass: HomeAssistant, mock_device_tracker_conf: list[Device], mqtt_mock: MqttMockHAClient, -): +) -> None: """Initialize components.""" hass.loop.run_until_complete(async_setup_component(hass, "device_tracker", {})) @@ -302,7 +306,9 @@ def setup_comp( hass.states.async_set("zone.outer", "zoning", OUTER_ZONE) -async def setup_owntracks(hass, config, ctx_cls=owntracks.OwnTracksContext): +async def setup_owntracks( + hass: HomeAssistant, config: dict[str, Any], ctx_cls=owntracks.OwnTracksContext +) -> None: """Set up OwnTracks.""" MockConfigEntry( domain="owntracks", data={"webhook_id": "owntracks_test", "secret": "abcd"} @@ -314,7 +320,7 @@ async def setup_owntracks(hass, config, ctx_cls=owntracks.OwnTracksContext): @pytest.fixture -def context(hass, setup_comp): +def context(hass: HomeAssistant, setup_comp: None) -> 
OwnTracksContextFactory: """Set up the mocked context.""" orig_context = owntracks.OwnTracksContext context = None @@ -344,7 +350,9 @@ def context(hass, setup_comp): return get_context -async def send_message(hass, topic, message, corrupt=False): +async def send_message( + hass: HomeAssistant, topic: str, message: dict[str, Any], corrupt: bool = False +) -> None: """Test the sending of a message.""" str_message = json.dumps(message) if corrupt: @@ -356,65 +364,73 @@ async def send_message(hass, topic, message, corrupt=False): await hass.async_block_till_done() -def assert_location_state(hass, location): +def assert_location_state(hass: HomeAssistant, location: str) -> None: """Test the assertion of a location state.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.state == location -def assert_location_latitude(hass, latitude): +def assert_location_latitude(hass: HomeAssistant, latitude: float) -> None: """Test the assertion of a location latitude.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("latitude") == latitude -def assert_location_longitude(hass, longitude): +def assert_location_longitude(hass: HomeAssistant, longitude: float) -> None: """Test the assertion of a location longitude.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("longitude") == longitude -def assert_location_accuracy(hass, accuracy): +def assert_location_accuracy(hass: HomeAssistant, accuracy: int) -> None: """Test the assertion of a location accuracy.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("gps_accuracy") == accuracy -def assert_location_source_type(hass, source_type): +def assert_location_source_type(hass: HomeAssistant, source_type: str) -> None: """Test the assertion of source_type.""" state = hass.states.get(DEVICE_TRACKER_STATE) assert state.attributes.get("source_type") == source_type -def assert_mobile_tracker_state(hass, location, beacon=IBEACON_DEVICE): +def assert_mobile_tracker_state( + hass: HomeAssistant, location: str, beacon: str = IBEACON_DEVICE +) -> None: """Test the assertion of a mobile beacon tracker state.""" dev_id = MOBILE_BEACON_FMT.format(beacon) state = hass.states.get(dev_id) assert state.state == location -def assert_mobile_tracker_latitude(hass, latitude, beacon=IBEACON_DEVICE): +def assert_mobile_tracker_latitude( + hass: HomeAssistant, latitude: float, beacon: str = IBEACON_DEVICE +) -> None: """Test the assertion of a mobile beacon tracker latitude.""" dev_id = MOBILE_BEACON_FMT.format(beacon) state = hass.states.get(dev_id) assert state.attributes.get("latitude") == latitude -def assert_mobile_tracker_accuracy(hass, accuracy, beacon=IBEACON_DEVICE): +def assert_mobile_tracker_accuracy( + hass: HomeAssistant, accuracy: int, beacon: str = IBEACON_DEVICE +) -> None: """Test the assertion of a mobile beacon tracker accuracy.""" dev_id = MOBILE_BEACON_FMT.format(beacon) state = hass.states.get(dev_id) assert state.attributes.get("gps_accuracy") == accuracy -async def test_location_invalid_devid(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_invalid_devid(hass: HomeAssistant) -> None: """Test the update of a location.""" await send_message(hass, "owntracks/paulus/nexus-5x", LOCATION_MESSAGE) state = hass.states.get("device_tracker.paulus_nexus_5x") assert state.state == "outer" -async def test_location_update(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_update(hass: 
HomeAssistant) -> None: """Test the update of a location.""" await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -424,7 +440,8 @@ async def test_location_update(hass: HomeAssistant, context) -> None: assert_location_state(hass, "outer") -async def test_location_update_no_t_key(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_update_no_t_key(hass: HomeAssistant) -> None: """Test the update of a location when message does not contain 't'.""" message = LOCATION_MESSAGE.copy() message.pop("t") @@ -436,7 +453,8 @@ async def test_location_update_no_t_key(hass: HomeAssistant, context) -> None: assert_location_state(hass, "outer") -async def test_location_inaccurate_gps(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_inaccurate_gps(hass: HomeAssistant) -> None: """Test the location for inaccurate GPS information.""" await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_INACCURATE) @@ -446,7 +464,8 @@ async def test_location_inaccurate_gps(hass: HomeAssistant, context) -> None: assert_location_longitude(hass, LOCATION_MESSAGE["lon"]) -async def test_location_zero_accuracy_gps(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_location_zero_accuracy_gps(hass: HomeAssistant) -> None: """Ignore the location for zero accuracy GPS information.""" await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_ZERO_ACCURACY) @@ -458,7 +477,9 @@ async def test_location_zero_accuracy_gps(hass: HomeAssistant, context) -> None: # ------------------------------------------------------------------------ # GPS based event entry / exit testing -async def test_event_gps_entry_exit(hass: HomeAssistant, context) -> None: +async def test_event_gps_entry_exit( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the entry event.""" # Entering the owntracks circular region named "inner" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -496,7 +517,9 @@ async def test_event_gps_entry_exit(hass: HomeAssistant, context) -> None: assert_location_accuracy(hass, LOCATION_MESSAGE["acc"]) -async def test_event_gps_with_spaces(hass: HomeAssistant, context) -> None: +async def test_event_gps_with_spaces( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the entry event.""" message = build_message({"desc": "inner 2"}, REGION_GPS_ENTER_MESSAGE) await send_message(hass, EVENT_TOPIC, message) @@ -509,7 +532,8 @@ async def test_event_gps_with_spaces(hass: HomeAssistant, context) -> None: assert not context().regions_entered[USER] -async def test_event_gps_entry_inaccurate(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_entry_inaccurate(hass: HomeAssistant) -> None: """Test the event for inaccurate entry.""" # Set location to the outer zone. 
await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -522,7 +546,9 @@ async def test_event_gps_entry_inaccurate(hass: HomeAssistant, context) -> None: assert_location_state(hass, "inner") -async def test_event_gps_entry_exit_inaccurate(hass: HomeAssistant, context) -> None: +async def test_event_gps_entry_exit_inaccurate( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the event for inaccurate exit.""" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -542,7 +568,9 @@ async def test_event_gps_entry_exit_inaccurate(hass: HomeAssistant, context) -> assert not context().regions_entered[USER] -async def test_event_gps_entry_exit_zero_accuracy(hass: HomeAssistant, context) -> None: +async def test_event_gps_entry_exit_zero_accuracy( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test entry/exit events with accuracy zero.""" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE_ZERO) @@ -562,9 +590,8 @@ async def test_event_gps_entry_exit_zero_accuracy(hass: HomeAssistant, context) assert not context().regions_entered[USER] -async def test_event_gps_exit_outside_zone_sets_away( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_exit_outside_zone_sets_away(hass: HomeAssistant) -> None: """Test the event for exit zone.""" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) assert_location_state(hass, "inner") @@ -577,7 +604,8 @@ async def test_event_gps_exit_outside_zone_sets_away( assert_location_state(hass, STATE_NOT_HOME) -async def test_event_gps_entry_exit_right_order(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_entry_exit_right_order(hass: HomeAssistant) -> None: """Test the event for ordering.""" # Enter inner zone # Set location to the outer zone. 
@@ -602,7 +630,8 @@ async def test_event_gps_entry_exit_right_order(hass: HomeAssistant, context) -> assert_location_state(hass, "outer") -async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant) -> None: """Test the event for wrong order.""" # Enter inner zone await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -625,7 +654,8 @@ async def test_event_gps_entry_exit_wrong_order(hass: HomeAssistant, context) -> assert_location_state(hass, "outer") -async def test_event_gps_entry_unknown_zone(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_entry_unknown_zone(hass: HomeAssistant) -> None: """Test the event for unknown zone.""" # Just treat as location update message = build_message({"desc": "unknown"}, REGION_GPS_ENTER_MESSAGE) @@ -634,7 +664,8 @@ async def test_event_gps_entry_unknown_zone(hass: HomeAssistant, context) -> Non assert_location_state(hass, "inner") -async def test_event_gps_exit_unknown_zone(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_gps_exit_unknown_zone(hass: HomeAssistant) -> None: """Test the event for unknown zone.""" # Just treat as location update message = build_message({"desc": "unknown"}, REGION_GPS_LEAVE_MESSAGE) @@ -643,7 +674,8 @@ async def test_event_gps_exit_unknown_zone(hass: HomeAssistant, context) -> None assert_location_state(hass, "outer") -async def test_event_entry_zone_loading_dash(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_entry_zone_loading_dash(hass: HomeAssistant) -> None: """Test the event for zone landing.""" # Make sure the leading - is ignored # Owntracks uses this to switch on hold @@ -652,7 +684,9 @@ async def test_event_entry_zone_loading_dash(hass: HomeAssistant, context) -> No assert_location_state(hass, "inner") -async def test_events_only_on(hass: HomeAssistant, context) -> None: +async def test_events_only_on( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test events_only config suppresses location updates.""" # Sending a location message that is not home await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_NOT_HOME) @@ -673,7 +707,9 @@ async def test_events_only_on(hass: HomeAssistant, context) -> None: assert_location_state(hass, STATE_NOT_HOME) -async def test_events_only_off(hass: HomeAssistant, context) -> None: +async def test_events_only_off( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test when events_only is False.""" # Sending a location message that is not home await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE_NOT_HOME) @@ -694,7 +730,8 @@ async def test_events_only_off(hass: HomeAssistant, context) -> None: assert_location_state(hass, "outer") -async def test_event_source_type_entry_exit(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_source_type_entry_exit(hass: HomeAssistant) -> None: """Test the entry and exit events of source type.""" # Entering the owntracks circular region named "inner" await send_message(hass, EVENT_TOPIC, REGION_GPS_ENTER_MESSAGE) @@ -724,7 +761,9 @@ async def test_event_source_type_entry_exit(hass: HomeAssistant, context) -> Non # Region Beacon based event entry / exit testing -async def test_event_region_entry_exit(hass: HomeAssistant, context) -> None: +async def 
test_event_region_entry_exit( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the entry event.""" # Seeing a beacon named "inner" await send_message(hass, EVENT_TOPIC, REGION_BEACON_ENTER_MESSAGE) @@ -763,7 +802,9 @@ async def test_event_region_entry_exit(hass: HomeAssistant, context) -> None: assert_location_accuracy(hass, LOCATION_MESSAGE["acc"]) -async def test_event_region_with_spaces(hass: HomeAssistant, context) -> None: +async def test_event_region_with_spaces( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the entry event.""" message = build_message({"desc": "inner 2"}, REGION_BEACON_ENTER_MESSAGE) await send_message(hass, EVENT_TOPIC, message) @@ -776,9 +817,8 @@ async def test_event_region_with_spaces(hass: HomeAssistant, context) -> None: assert not context().regions_entered[USER] -async def test_event_region_entry_exit_right_order( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_region_entry_exit_right_order(hass: HomeAssistant) -> None: """Test the event for ordering.""" # Enter inner zone # Set location to the outer zone. @@ -809,9 +849,8 @@ async def test_event_region_entry_exit_right_order( assert_location_state(hass, "inner") -async def test_event_region_entry_exit_wrong_order( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_region_entry_exit_wrong_order(hass: HomeAssistant) -> None: """Test the event for wrong order.""" # Enter inner zone await send_message(hass, EVENT_TOPIC, REGION_BEACON_ENTER_MESSAGE) @@ -838,9 +877,8 @@ async def test_event_region_entry_exit_wrong_order( assert_location_state(hass, "inner_2") -async def test_event_beacon_unknown_zone_no_location( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_beacon_unknown_zone_no_location(hass: HomeAssistant) -> None: """Test the event for unknown zone.""" # A beacon which does not match a HA zone is the # definition of a mobile beacon. In this case, "unknown" @@ -865,7 +903,8 @@ async def test_event_beacon_unknown_zone_no_location( assert_mobile_tracker_state(hass, "unknown", "unknown") -async def test_event_beacon_unknown_zone(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_event_beacon_unknown_zone(hass: HomeAssistant) -> None: """Test the event for unknown zone.""" # A beacon which does not match a HA zone is the # definition of a mobile beacon. In this case, "unknown" @@ -885,9 +924,8 @@ async def test_event_beacon_unknown_zone(hass: HomeAssistant, context) -> None: assert_mobile_tracker_state(hass, "outer", "unknown") -async def test_event_beacon_entry_zone_loading_dash( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_event_beacon_entry_zone_loading_dash(hass: HomeAssistant) -> None: """Test the event for beacon zone landing.""" # Make sure the leading - is ignored # Owntracks uses this to switch on hold @@ -899,7 +937,8 @@ async def test_event_beacon_entry_zone_loading_dash( # ------------------------------------------------------------------------ # Mobile Beacon based event entry / exit testing -async def test_mobile_enter_move_beacon(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_mobile_enter_move_beacon(hass: HomeAssistant) -> None: """Test the movement of a beacon.""" # I am in the outer zone. 
await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -923,7 +962,8 @@ async def test_mobile_enter_move_beacon(hass: HomeAssistant, context) -> None: assert_mobile_tracker_latitude(hass, not_home_lat) -async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant) -> None: """Test the enter and the exit of a mobile beacon.""" # I am in the outer zone. await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -946,7 +986,8 @@ async def test_mobile_enter_exit_region_beacon(hass: HomeAssistant, context) -> assert_mobile_tracker_state(hass, "outer") -async def test_mobile_exit_move_beacon(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_mobile_exit_move_beacon(hass: HomeAssistant) -> None: """Test the exit move of a beacon.""" # I am in the outer zone. await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -968,7 +1009,9 @@ async def test_mobile_exit_move_beacon(hass: HomeAssistant, context) -> None: assert_mobile_tracker_state(hass, "outer") -async def test_mobile_multiple_async_enter_exit(hass: HomeAssistant, context) -> None: +async def test_mobile_multiple_async_enter_exit( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the multiple entering.""" # Test race condition for _ in range(20): @@ -988,7 +1031,9 @@ async def test_mobile_multiple_async_enter_exit(hass: HomeAssistant, context) -> assert len(context().mobile_beacons_active["greg_phone"]) == 0 -async def test_mobile_multiple_enter_exit(hass: HomeAssistant, context) -> None: +async def test_mobile_multiple_enter_exit( + hass: HomeAssistant, context: OwnTracksContextFactory +) -> None: """Test the multiple entering.""" await send_message(hass, EVENT_TOPIC, MOBILE_BEACON_ENTER_EVENT_MESSAGE) await send_message(hass, EVENT_TOPIC, MOBILE_BEACON_ENTER_EVENT_MESSAGE) @@ -997,7 +1042,8 @@ async def test_mobile_multiple_enter_exit(hass: HomeAssistant, context) -> None: assert len(context().mobile_beacons_active["greg_phone"]) == 0 -async def test_complex_movement(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_complex_movement(hass: HomeAssistant) -> None: """Test a complex sequence representative of real-world use.""" # I am in the outer zone. 
await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -1119,9 +1165,8 @@ async def test_complex_movement(hass: HomeAssistant, context) -> None: assert_mobile_tracker_state(hass, "outer") -async def test_complex_movement_sticky_keys_beacon( - hass: HomeAssistant, context -) -> None: +@pytest.mark.usefixtures("context") +async def test_complex_movement_sticky_keys_beacon(hass: HomeAssistant) -> None: """Test a complex sequence which was previously broken.""" # I am not_home await send_message(hass, LOCATION_TOPIC, LOCATION_MESSAGE) @@ -1233,7 +1278,8 @@ async def test_complex_movement_sticky_keys_beacon( assert_mobile_tracker_latitude(hass, INNER_ZONE["latitude"]) -async def test_waypoint_import_simple(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_waypoint_import_simple(hass: HomeAssistant) -> None: """Test a simple import of list of waypoints.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC, waypoints_message) @@ -1244,7 +1290,8 @@ async def test_waypoint_import_simple(hass: HomeAssistant, context) -> None: assert wayp is not None -async def test_waypoint_import_block(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_waypoint_import_block(hass: HomeAssistant) -> None: """Test import of list of waypoints for blocked user.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC_BLOCKED, waypoints_message) @@ -1275,7 +1322,8 @@ async def test_waypoint_import_no_whitelist(hass: HomeAssistant, setup_comp) -> assert wayp is not None -async def test_waypoint_import_bad_json(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_waypoint_import_bad_json(hass: HomeAssistant) -> None: """Test importing a bad JSON payload.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC, waypoints_message, True) @@ -1286,7 +1334,8 @@ async def test_waypoint_import_bad_json(hass: HomeAssistant, context) -> None: assert wayp is None -async def test_waypoint_import_existing(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_waypoint_import_existing(hass: HomeAssistant) -> None: """Test importing a zone that exists.""" waypoints_message = WAYPOINTS_EXPORTED_MESSAGE.copy() await send_message(hass, WAYPOINTS_TOPIC, waypoints_message) @@ -1299,7 +1348,8 @@ async def test_waypoint_import_existing(hass: HomeAssistant, context) -> None: assert wayp == new_wayp -async def test_single_waypoint_import(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_single_waypoint_import(hass: HomeAssistant) -> None: """Test single waypoint message.""" waypoint_message = WAYPOINT_MESSAGE.copy() await send_message(hass, WAYPOINT_TOPIC, waypoint_message) @@ -1307,7 +1357,8 @@ async def test_single_waypoint_import(hass: HomeAssistant, context) -> None: assert wayp is not None -async def test_not_implemented_message(hass: HomeAssistant, context) -> None: +@pytest.mark.usefixtures("context") +async def test_not_implemented_message(hass: HomeAssistant) -> None: """Handle not implemented message type.""" patch_handler = patch( "homeassistant.components.owntracks.messages.async_handle_not_impl_msg", @@ -1318,7 +1369,8 @@ async def test_not_implemented_message(hass: HomeAssistant, context) -> None: patch_handler.stop() -async def test_unsupported_message(hass: HomeAssistant, context) -> None: 
+@pytest.mark.usefixtures("context") +async def test_unsupported_message(hass: HomeAssistant) -> None: """Handle not implemented message type.""" patch_handler = patch( "homeassistant.components.owntracks.messages.async_handle_unsupported_msg", @@ -1385,7 +1437,7 @@ def mock_cipher(): @pytest.fixture -def config_context(hass, setup_comp): +def config_context(setup_comp: None) -> Generator[None]: """Set up the mocked context.""" patch_load = patch( "homeassistant.components.device_tracker.async_load_config", diff --git a/tests/components/panasonic_viera/test_remote.py b/tests/components/panasonic_viera/test_remote.py index 3ae241fc5e9..43f11c7d766 100644 --- a/tests/components/panasonic_viera/test_remote.py +++ b/tests/components/panasonic_viera/test_remote.py @@ -18,7 +18,7 @@ from .conftest import MOCK_CONFIG_DATA, MOCK_DEVICE_INFO, MOCK_ENCRYPTION_DATA from tests.common import MockConfigEntry -async def setup_panasonic_viera(hass): +async def setup_panasonic_viera(hass: HomeAssistant) -> None: """Initialize integration for tests.""" mock_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/permobil/test_config_flow.py b/tests/components/permobil/test_config_flow.py index ea39e678459..4474340f811 100644 --- a/tests/components/permobil/test_config_flow.py +++ b/tests/components/permobil/test_config_flow.py @@ -287,6 +287,7 @@ async def test_config_flow_reauth_success( result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": "reauth", "entry_id": mock_entry.entry_id}, + data=mock_entry.data, ) assert result["type"] is FlowResultType.FORM @@ -329,6 +330,7 @@ async def test_config_flow_reauth_fail_invalid_code( result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": "reauth", "entry_id": mock_entry.entry_id}, + data=mock_entry.data, ) assert result["type"] is FlowResultType.FORM @@ -366,6 +368,7 @@ async def test_config_flow_reauth_fail_code_request( result = await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": "reauth", "entry_id": reauth_entry.entry_id}, + data=mock_entry.data, ) assert result["type"] is FlowResultType.ABORT diff --git a/tests/components/person/conftest.py b/tests/components/person/conftest.py index ecec42b003d..a6dc95ccc9e 100644 --- a/tests/components/person/conftest.py +++ b/tests/components/person/conftest.py @@ -18,7 +18,7 @@ DEVICE_TRACKER_2 = "device_tracker.test_tracker_2" @pytest.fixture -def storage_collection(hass): +def storage_collection(hass: HomeAssistant) -> person.PersonStorageCollection: """Return an empty storage collection.""" id_manager = collection.IDManager() return person.PersonStorageCollection( diff --git a/tests/components/philips_js/test_config_flow.py b/tests/components/philips_js/test_config_flow.py index d7f539db9cf..80d05961813 100644 --- a/tests/components/philips_js/test_config_flow.py +++ b/tests/components/philips_js/test_config_flow.py @@ -60,7 +60,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry) -> None: async def test_reauth( - hass: HomeAssistant, mock_setup_entry, mock_config_entry, mock_tv + hass: HomeAssistant, mock_setup_entry, mock_config_entry: MockConfigEntry, mock_tv ) -> None: """Test we get the form.""" @@ -69,15 +69,7 @@ async def test_reauth( assert await hass.config_entries.async_setup(mock_config_entry.entry_id) assert len(mock_setup_entry.mock_calls) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": 
mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "user" diff --git a/tests/components/picnic/test_config_flow.py b/tests/components/picnic/test_config_flow.py index 9ba18dac9a9..8d668b28c16 100644 --- a/tests/components/picnic/test_config_flow.py +++ b/tests/components/picnic/test_config_flow.py @@ -170,16 +170,15 @@ async def test_step_reauth(hass: HomeAssistant, picnic_api) -> None: # Create a mocked config entry conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id=picnic_api().get_user()["user_id"], data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Init a re-auth flow - result_init = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf - ) + result_init = await entry.start_reauth_flow(hass) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" @@ -210,16 +209,15 @@ async def test_step_reauth_failed(hass: HomeAssistant) -> None: user_id = "f29-2a6-o32n" conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id=user_id, data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Init a re-auth flow - result_init = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf - ) + result_init = await entry.start_reauth_flow(hass) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" @@ -249,16 +247,15 @@ async def test_step_reauth_different_account(hass: HomeAssistant, picnic_api) -> # Create a mocked config entry, unique_id should be different that the user id in the api response conf = {CONF_ACCESS_TOKEN: "a3p98fsen.a39p3fap", CONF_COUNTRY_CODE: "NL"} - MockConfigEntry( + entry = MockConfigEntry( domain=DOMAIN, unique_id="3fpawh-ues-af3ho", data=conf, - ).add_to_hass(hass) + ) + entry.add_to_hass(hass) # Init a re-auth flow - result_init = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=conf - ) + result_init = await entry.start_reauth_flow(hass) assert result_init["type"] is FlowResultType.FORM assert result_init["step_id"] == "user" diff --git a/tests/components/pilight/test_init.py b/tests/components/pilight/test_init.py index c48135f59eb..dfc62d30619 100644 --- a/tests/components/pilight/test_init.py +++ b/tests/components/pilight/test_init.py @@ -40,7 +40,7 @@ class PilightDaemonSim: "message": {"id": 0, "unit": 0, "off": 1}, } - def __init__(self, host, port): + def __init__(self, host, port) -> None: """Init pilight client, ignore parameters.""" def send_code(self, call): diff --git a/tests/components/pilight/test_sensor.py b/tests/components/pilight/test_sensor.py index 9f529117642..e960e46b50a 100644 --- a/tests/components/pilight/test_sensor.py +++ b/tests/components/pilight/test_sensor.py @@ -1,6 +1,7 @@ """The tests for the Pilight sensor platform.""" import logging +from typing import Any import pytest @@ -17,7 +18,9 @@ def setup_comp(hass: HomeAssistant) -> None: mock_component(hass, "pilight") -def fire_pilight_message(hass, protocol, data): +def fire_pilight_message( + hass: HomeAssistant, protocol: str, data: dict[str, Any] +) -> None: """Fire 
the fake Pilight message.""" message = {pilight.CONF_PROTOCOL: protocol} message.update(data) diff --git a/tests/components/ping/snapshots/test_binary_sensor.ambr b/tests/components/ping/snapshots/test_binary_sensor.ambr index 98ea9a8a847..24717938874 100644 --- a/tests/components/ping/snapshots/test_binary_sensor.ambr +++ b/tests/components/ping/snapshots/test_binary_sensor.ambr @@ -1,64 +1,4 @@ # serializer version: 1 -# name: test_sensor - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': None, - 'entity_id': 'binary_sensor.10_10_10_10', - 'has_entity_name': False, - 'hidden_by': None, - 'icon': None, - 'id': , - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': '10.10.10.10', - 'platform': 'ping', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor.1 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': '10.10.10.10', - 'round_trip_time_avg': 4.333, - 'round_trip_time_max': 10, - 'round_trip_time_mdev': '', - 'round_trip_time_min': 1, - }), - 'context': , - 'entity_id': 'binary_sensor.10_10_10_10', - 'last_changed': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor.2 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'connectivity', - 'friendly_name': '10.10.10.10', - }), - 'context': , - 'entity_id': 'binary_sensor.10_10_10_10', - 'last_changed': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_setup_and_update EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/plex/helpers.py b/tests/components/plex/helpers.py index 4828b972d9d..434c31996e4 100644 --- a/tests/components/plex/helpers.py +++ b/tests/components/plex/helpers.py @@ -5,6 +5,7 @@ from typing import Any from plexwebsocket import SIGNAL_CONNECTION_STATE, STATE_CONNECTED +from homeassistant.core import HomeAssistant from homeassistant.helpers.typing import UNDEFINED, UndefinedType import homeassistant.util.dt as dt_util @@ -39,7 +40,7 @@ def trigger_plex_update( callback(msgtype, UPDATE_PAYLOAD if payload is UNDEFINED else payload, None) -async def wait_for_debouncer(hass): +async def wait_for_debouncer(hass: HomeAssistant) -> None: """Move time forward to wait for sensor debouncer.""" next_update = dt_util.utcnow() + timedelta(seconds=3) async_fire_time_changed(hass, next_update) diff --git a/tests/components/plex/mock_classes.py b/tests/components/plex/mock_classes.py index c6f1aeda9b7..92844f755d6 100644 --- a/tests/components/plex/mock_classes.py +++ b/tests/components/plex/mock_classes.py @@ -67,7 +67,7 @@ GDM_CLIENT_PAYLOAD = [ class MockGDM: """Mock a GDM instance.""" - def __init__(self, disabled=False): + def __init__(self, disabled=False) -> None: """Initialize the object.""" self.entries = [] self.disabled = disabled diff --git a/tests/components/plex/test_config_flow.py b/tests/components/plex/test_config_flow.py index 08733a7dd17..c4ec108bb6b 100644 --- a/tests/components/plex/test_config_flow.py +++ b/tests/components/plex/test_config_flow.py @@ -26,7 +26,6 @@ from homeassistant.components.plex.const import ( ) from homeassistant.config_entries import ( SOURCE_INTEGRATION_DISCOVERY, - SOURCE_REAUTH, SOURCE_USER, ConfigEntryState, ) @@ -537,7 +536,7 
@@ async def test_manual_config(hass: HomeAssistant, mock_plex_calls) -> None: class WrongCertValidaitionException(requests.exceptions.SSLError): """Mock the exception showing an unmatched error.""" - def __init__(self): # pylint: disable=super-init-not-called + def __init__(self) -> None: # pylint: disable=super-init-not-called self.__context__ = ssl.SSLCertVerificationError( "some random message that doesn't match" ) @@ -744,11 +743,7 @@ async def test_reauth( """Test setup and reauthorization of a Plex token.""" entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flow_id = result["flow_id"] with ( @@ -795,11 +790,7 @@ async def test_reauth_multiple_servers_available( entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flow_id = result["flow_id"] diff --git a/tests/components/plex/test_init.py b/tests/components/plex/test_init.py index 15af78faf65..490091998ff 100644 --- a/tests/components/plex/test_init.py +++ b/tests/components/plex/test_init.py @@ -209,7 +209,7 @@ async def test_setup_when_certificate_changed( class WrongCertHostnameException(requests.exceptions.SSLError): """Mock the exception showing a mismatched hostname.""" - def __init__(self): # pylint: disable=super-init-not-called + def __init__(self) -> None: # pylint: disable=super-init-not-called self.__context__ = ssl.SSLCertVerificationError( f"hostname '{old_domain}' doesn't match" ) diff --git a/tests/components/plex/test_playback.py b/tests/components/plex/test_playback.py index 183a779c940..c4206bd5f3e 100644 --- a/tests/components/plex/test_playback.py +++ b/tests/components/plex/test_playback.py @@ -28,7 +28,7 @@ class MockPlexMedia: viewOffset = 333 _server = Mock(_baseurl=PLEX_DIRECT_URL) - def __init__(self, title, mediatype): + def __init__(self, title, mediatype) -> None: """Initialize the instance.""" self.listType = mediatype self.title = title diff --git a/tests/components/point/test_config_flow.py b/tests/components/point/test_config_flow.py index ec71b04b84b..71f3f31ce8d 100644 --- a/tests/components/point/test_config_flow.py +++ b/tests/components/point/test_config_flow.py @@ -10,7 +10,9 @@ from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType -def init_config_flow(hass, side_effect=None): +def init_config_flow( + hass: HomeAssistant, side_effect: type[Exception] | None = None +) -> config_flow.PointFlowHandler: """Init a configuration flow.""" config_flow.register_flow_implementation(hass, DOMAIN, "id", "secret") flow = config_flow.PointFlowHandler() @@ -22,7 +24,7 @@ def init_config_flow(hass, side_effect=None): @pytest.fixture -def is_authorized(): +def is_authorized() -> bool: """Set PointSession authorized.""" return True diff --git a/tests/components/powerwall/mocks.py b/tests/components/powerwall/mocks.py index e43ccee16f1..3081776483c 100644 --- a/tests/components/powerwall/mocks.py +++ b/tests/components/powerwall/mocks.py @@ -17,6 +17,7 @@ from tesla_powerwall import ( ) from homeassistant.core import HomeAssistant +from homeassistant.util.json import JsonValueType from tests.common import load_fixture @@ -87,7 +88,7 @@ async def _mock_powerwall_return_value( return powerwall_mock -async def _mock_powerwall_site_name(hass, site_name): +async def 
_mock_powerwall_site_name(hass: HomeAssistant, site_name: str) -> MagicMock: powerwall_mock = MagicMock(Powerwall) powerwall_mock.__aenter__.return_value = powerwall_mock @@ -110,7 +111,7 @@ async def _mock_powerwall_side_effect(site_info=None): return powerwall_mock -async def _async_load_json_fixture(hass, path): +async def _async_load_json_fixture(hass: HomeAssistant, path: str) -> JsonValueType: fixture = await hass.async_add_executor_job( load_fixture, os.path.join("powerwall", path) ) diff --git a/tests/components/powerwall/test_config_flow.py b/tests/components/powerwall/test_config_flow.py index db0ef2e9884..5074a289d19 100644 --- a/tests/components/powerwall/test_config_flow.py +++ b/tests/components/powerwall/test_config_flow.py @@ -336,11 +336,7 @@ async def test_form_reauth(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/powerwall/test_switch.py b/tests/components/powerwall/test_switch.py index b01f60210a6..b4ff0ca724e 100644 --- a/tests/components/powerwall/test_switch.py +++ b/tests/components/powerwall/test_switch.py @@ -1,6 +1,6 @@ """Test for Powerwall off-grid switch.""" -from unittest.mock import patch +from unittest.mock import MagicMock, patch import pytest from tesla_powerwall import GridStatus, PowerwallError @@ -24,7 +24,7 @@ ENTITY_ID = "switch.mysite_off_grid_operation" @pytest.fixture(name="mock_powerwall") -async def mock_powerwall_fixture(hass): +async def mock_powerwall_fixture(hass: HomeAssistant) -> MagicMock: """Set up base powerwall fixture.""" mock_powerwall = await _mock_powerwall_with_fixtures(hass) diff --git a/tests/components/profiler/test_init.py b/tests/components/profiler/test_init.py index 2eca84b43fe..3f0e0b92056 100644 --- a/tests/components/profiler/test_init.py +++ b/tests/components/profiler/test_init.py @@ -176,12 +176,12 @@ async def test_dump_log_object( await hass.async_block_till_done() class DumpLogDummy: - def __init__(self, fail): + def __init__(self, fail) -> None: self.fail = fail def __repr__(self): if self.fail: - raise Exception("failed") # pylint: disable=broad-exception-raised + raise Exception("failed") # noqa: TRY002 return "" obj1 = DumpLogDummy(False) @@ -284,14 +284,14 @@ async def test_lru_stats(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) return 1 class DomainData: - def __init__(self): + def __init__(self) -> None: self._data = LRU(1) domain_data = DomainData() assert hass.services.has_service(DOMAIN, SERVICE_LRU_STATS) class LRUCache: - def __init__(self): + def __init__(self) -> None: self._data = {"sqlalchemy_test": 1} sqlalchemy_lru_cache = LRUCache() diff --git a/tests/components/prometheus/test_init.py b/tests/components/prometheus/test_init.py index 12643c39dfa..0dfa3210671 100644 --- a/tests/components/prometheus/test_init.py +++ b/tests/components/prometheus/test_init.py @@ -11,6 +11,7 @@ import prometheus_client import pytest from homeassistant.components import ( + alarm_control_panel, binary_sensor, climate, counter, @@ -61,6 +62,8 @@ from homeassistant.const import ( CONTENT_TYPE_TEXT_PLAIN, DEGREE, PERCENTAGE, + STATE_ALARM_ARMED_AWAY, + STATE_ALARM_ARMED_HOME, STATE_CLOSED, STATE_CLOSING, STATE_HOME, @@ -632,6 +635,43 @@ async def test_fan( ) 
+@pytest.mark.parametrize("namespace", [""]) +async def test_alarm_control_panel( + client: ClientSessionGenerator, + alarm_control_panel_entities: dict[str, er.RegistryEntry], +) -> None: + """Test prometheus metrics for alarm control panel.""" + body = await generate_latest_metrics(client) + + assert ( + 'alarm_control_panel_state{domain="alarm_control_panel",' + 'entity="alarm_control_panel.alarm_control_panel_1",' + 'friendly_name="Alarm Control Panel 1",' + 'state="armed_away"} 1.0' in body + ) + + assert ( + 'alarm_control_panel_state{domain="alarm_control_panel",' + 'entity="alarm_control_panel.alarm_control_panel_1",' + 'friendly_name="Alarm Control Panel 1",' + 'state="disarmed"} 0.0' in body + ) + + assert ( + 'alarm_control_panel_state{domain="alarm_control_panel",' + 'entity="alarm_control_panel.alarm_control_panel_2",' + 'friendly_name="Alarm Control Panel 2",' + 'state="armed_home"} 1.0' in body + ) + + assert ( + 'alarm_control_panel_state{domain="alarm_control_panel",' + 'entity="alarm_control_panel.alarm_control_panel_2",' + 'friendly_name="Alarm Control Panel 2",' + 'state="armed_away"} 0.0' in body + ) + + @pytest.mark.parametrize("namespace", [""]) async def test_cover( client: ClientSessionGenerator, cover_entities: dict[str, er.RegistryEntry] @@ -1903,6 +1943,36 @@ async def fan_fixture( return data +@pytest.fixture(name="alarm_control_panel_entities") +async def alarm_control_panel_fixture( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> dict[str, er.RegistryEntry]: + """Simulate alarm control panel entities.""" + data = {} + alarm_control_panel_1 = entity_registry.async_get_or_create( + domain=alarm_control_panel.DOMAIN, + platform="test", + unique_id="alarm_control_panel_1", + suggested_object_id="alarm_control_panel_1", + original_name="Alarm Control Panel 1", + ) + set_state_with_entry(hass, alarm_control_panel_1, STATE_ALARM_ARMED_AWAY) + data["alarm_control_panel_1"] = alarm_control_panel_1 + + alarm_control_panel_2 = entity_registry.async_get_or_create( + domain=alarm_control_panel.DOMAIN, + platform="test", + unique_id="alarm_control_panel_2", + suggested_object_id="alarm_control_panel_2", + original_name="Alarm Control Panel 2", + ) + set_state_with_entry(hass, alarm_control_panel_2, STATE_ALARM_ARMED_HOME) + data["alarm_control_panel_2"] = alarm_control_panel_2 + + await hass.async_block_till_done() + return data + + @pytest.fixture(name="person_entities") async def person_fixture( hass: HomeAssistant, entity_registry: er.EntityRegistry diff --git a/tests/components/prosegur/test_config_flow.py b/tests/components/prosegur/test_config_flow.py index 9362cecc289..7c3f399ee09 100644 --- a/tests/components/prosegur/test_config_flow.py +++ b/tests/components/prosegur/test_config_flow.py @@ -143,15 +143,7 @@ async def test_reauth_flow(hass: HomeAssistant, mock_list_contracts) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -208,15 +200,7 @@ async def test_reauth_flow_error(hass: HomeAssistant, exception, base_error) -> ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": 
entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.prosegur.config_flow.Installation.list", diff --git a/tests/components/proximity/test_config_flow.py b/tests/components/proximity/test_config_flow.py index 3ed9f5cba27..626565146d1 100644 --- a/tests/components/proximity/test_config_flow.py +++ b/tests/components/proximity/test_config_flow.py @@ -10,8 +10,8 @@ from homeassistant.components.proximity.const import ( CONF_TRACKED_ENTITIES, DOMAIN, ) -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER -from homeassistant.const import CONF_NAME, CONF_UNIT_OF_MEASUREMENT, CONF_ZONE +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_ZONE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -120,42 +120,6 @@ async def test_options_flow(hass: HomeAssistant) -> None: } -async def test_import_flow(hass: HomeAssistant) -> None: - """Test import of yaml configuration.""" - with patch( - "homeassistant.components.proximity.async_setup_entry", return_value=True - ) as mock_setup_entry: - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data={ - CONF_NAME: "home", - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: ["device_tracker.test1"], - CONF_IGNORED_ZONES: ["zone.work"], - CONF_TOLERANCE: 10, - CONF_UNIT_OF_MEASUREMENT: "km", - }, - ) - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["data"] == { - CONF_NAME: "home", - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: ["device_tracker.test1"], - CONF_IGNORED_ZONES: ["zone.work"], - CONF_TOLERANCE: 10, - CONF_UNIT_OF_MEASUREMENT: "km", - } - - zone = hass.states.get("zone.home") - assert result["title"] == zone.name - - await hass.async_block_till_done() - - assert mock_setup_entry.called - - async def test_abort_duplicated_entry(hass: HomeAssistant) -> None: """Test if we abort on duplicate user input data.""" DATA = { diff --git a/tests/components/proximity/test_init.py b/tests/components/proximity/test_init.py index 6c2b54cae29..eeb181e0670 100644 --- a/tests/components/proximity/test_init.py +++ b/tests/components/proximity/test_init.py @@ -2,15 +2,12 @@ import pytest -from homeassistant.components import automation, script -from homeassistant.components.automation import automations_with_entity from homeassistant.components.proximity.const import ( CONF_IGNORED_ZONES, CONF_TOLERANCE, CONF_TRACKED_ENTITIES, DOMAIN, ) -from homeassistant.components.script import scripts_with_entity from homeassistant.const import ( ATTR_FRIENDLY_NAME, CONF_ZONE, @@ -20,109 +17,81 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er import homeassistant.helpers.issue_registry as ir -from homeassistant.setup import async_setup_component from homeassistant.util import slugify from tests.common import MockConfigEntry +async def async_setup_single_entry( + hass: HomeAssistant, + zone: str, + tracked_entites: list[str], + ignored_zones: list[str], + tolerance: int, +) -> MockConfigEntry: + """Set up the proximity component with a single entry.""" + mock_config = MockConfigEntry( + domain=DOMAIN, + title="Home", + data={ + CONF_ZONE: zone, + CONF_TRACKED_ENTITIES: tracked_entites, + CONF_IGNORED_ZONES: ignored_zones, + CONF_TOLERANCE: tolerance, + }, + ) + mock_config.add_to_hass(hass) + assert await 
hass.config_entries.async_setup(mock_config.entry_id) + await hass.async_block_till_done() + return mock_config + + @pytest.mark.parametrize( - ("friendly_name", "config"), + "config", [ - ( - "home", - { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - }, - ), - ( - "work", - { - "devices": ["device_tracker.test1"], - "tolerance": "1", - "zone": "work", - }, - ), + { + CONF_IGNORED_ZONES: ["zone.work"], + CONF_TRACKED_ENTITIES: ["device_tracker.test1", "device_tracker.test2"], + CONF_TOLERANCE: 1, + CONF_ZONE: "zone.home", + }, + { + CONF_IGNORED_ZONES: [], + CONF_TRACKED_ENTITIES: ["device_tracker.test1"], + CONF_TOLERANCE: 1, + CONF_ZONE: "zone.work", + }, ], ) -async def test_proximities( - hass: HomeAssistant, friendly_name: str, config: dict -) -> None: +async def test_proximities(hass: HomeAssistant, config: dict) -> None: """Test a list of proximities.""" - assert await async_setup_component( - hass, DOMAIN, {"proximity": {friendly_name: config}} + title = hass.states.get(config[CONF_ZONE]).name + mock_config = MockConfigEntry( + domain=DOMAIN, + title=title, + data=config, ) + mock_config.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config.entry_id) await hass.async_block_till_done() - # proximity entity - state = hass.states.get(f"proximity.{friendly_name}") - assert state.state == "not set" - assert state.attributes.get("nearest") == "not set" - assert state.attributes.get("dir_of_travel") == "not set" - hass.states.async_set(f"proximity.{friendly_name}", "0") - await hass.async_block_till_done() - state = hass.states.get(f"proximity.{friendly_name}") - assert state.state == "0" + zone_name = slugify(title) # sensor entities - state = hass.states.get(f"sensor.{friendly_name}_nearest_device") + state = hass.states.get(f"sensor.{zone_name}_nearest_device") assert state.state == STATE_UNKNOWN - for device in config["devices"]: - entity_base_name = f"sensor.{friendly_name}_{slugify(device.split('.')[-1])}" + for device in config[CONF_TRACKED_ENTITIES]: + entity_base_name = f"sensor.{zone_name}_{slugify(device.split('.')[-1])}" state = hass.states.get(f"{entity_base_name}_distance") assert state.state == STATE_UNAVAILABLE state = hass.states.get(f"{entity_base_name}_direction_of_travel") assert state.state == STATE_UNAVAILABLE -async def test_legacy_setup(hass: HomeAssistant) -> None: - """Test legacy setup only on imported entries.""" - config = { - "proximity": { - "home": { - "devices": ["device_tracker.test1"], - "tolerance": "1", - }, - } - } - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - assert hass.states.get("proximity.home") - - mock_config = MockConfigEntry( - domain=DOMAIN, - title="work", - data={ - CONF_ZONE: "zone.work", - CONF_TRACKED_ENTITIES: ["device_tracker.test2"], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_work", - ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - - assert not hass.states.get("proximity.work") - - async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: """Test for tracker in zone.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) 
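For reference, a proximity test that previously passed a YAML block to async_setup_component now reads roughly like the sketch below, using the async_setup_single_entry helper introduced above (entity IDs and coordinates are illustrative, mirroring the values these tests use):

async def test_tracker_distance_sketch(hass: HomeAssistant) -> None:
    """Illustrative use of the config-entry based setup helper."""
    await async_setup_single_entry(
        hass, "zone.home", ["device_tracker.test1"], ["zone.work"], 1
    )

    hass.states.async_set(
        "device_tracker.test1",
        "not_home",
        {"friendly_name": "test1", "latitude": 20.1, "longitude": 10.1},
    )
    await hass.async_block_till_done()

    # Assertions target the sensor entities created by the config entry;
    # the legacy proximity.home entity is no longer asserted on.
    assert hass.states.get("sensor.home_nearest_device").state == "test1"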
hass.states.async_set( "device_tracker.test1", @@ -131,12 +100,6 @@ async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.state == "0" - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "arrived" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -150,17 +113,7 @@ async def test_device_tracker_test1_in_zone(hass: HomeAssistant) -> None: async def test_device_tracker_test1_away(hass: HomeAssistant) -> None: """Test for tracker state away.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -170,11 +123,6 @@ async def test_device_tracker_test1_away(hass: HomeAssistant) -> None: await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -190,20 +138,7 @@ async def test_device_tracker_test1_awayfurther( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state away further.""" - - await hass.async_block_till_done() - - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -212,11 +147,6 @@ async def test_device_tracker_test1_awayfurther( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -234,11 +164,6 @@ async def test_device_tracker_test1_awayfurther( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "away_from" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -254,19 +179,7 @@ async def test_device_tracker_test1_awaycloser( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state away closer.""" - await hass.async_block_till_done() - - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", @@ -275,11 +188,6 @@ async def test_device_tracker_test1_awaycloser( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == 
"unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -297,11 +205,6 @@ async def test_device_tracker_test1_awaycloser( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "towards" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -315,27 +218,11 @@ async def test_device_tracker_test1_awaycloser( async def test_all_device_trackers_in_ignored_zone(hass: HomeAssistant) -> None: """Test for tracker in ignored zone.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set("device_tracker.test1", "work", {"friendly_name": "test1"}) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.state == "not set" - assert state.attributes.get("nearest") == "not set" - assert state.attributes.get("dir_of_travel") == "not set" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == STATE_UNKNOWN @@ -349,28 +236,13 @@ async def test_all_device_trackers_in_ignored_zone(hass: HomeAssistant) -> None: async def test_device_tracker_test1_no_coordinates(hass: HomeAssistant) -> None: """Test for tracker with no coordinates.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "not set" - assert state.attributes.get("dir_of_travel") == "not set" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == STATE_UNKNOWN @@ -384,19 +256,8 @@ async def test_device_tracker_test1_no_coordinates(hass: HomeAssistant) -> None: async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> None: """Test for tracker states.""" - assert await async_setup_component( - hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1"], - "tolerance": 1000, - "zone": "home", - } - } - }, + await async_setup_single_entry( + hass, "zone.home", ["device_tracker.test1"], ["zone.work"], 1000 ) hass.states.async_set( @@ -406,11 +267,6 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -428,11 +284,6 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert 
state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "stationary" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -446,17 +297,13 @@ async def test_device_tracker_test1_awayfurther_a_bit(hass: HomeAssistant) -> No async def test_device_trackers_in_zone(hass: HomeAssistant) -> None: """Test for trackers in zone.""" - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - } - } - } - - assert await async_setup_component(hass, DOMAIN, config) + await async_setup_single_entry( + hass, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, + ) hass.states.async_set( "device_tracker.test1", @@ -471,14 +318,6 @@ async def test_device_trackers_in_zone(hass: HomeAssistant) -> None: ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.state == "0" - assert (state.attributes.get("nearest") == "test1, test2") or ( - state.attributes.get("nearest") == "test2, test1" - ) - assert state.attributes.get("dir_of_travel") == "arrived" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1, test2" @@ -495,30 +334,18 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( hass: HomeAssistant, config_zones ) -> None: """Test for tracker ordering.""" - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) - await hass.async_block_till_done() - - assert await async_setup_component( + await async_setup_single_entry( hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) hass.states.async_set( @@ -528,11 +355,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -556,11 +378,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test1( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -582,28 +399,19 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( hass: HomeAssistant, config_zones ) -> None: """Test for tracker ordering.""" - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) - await hass.async_block_till_done() - assert await async_setup_component( + + await async_setup_single_entry( hass, - DOMAIN, - { 
- "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) hass.states.async_set( @@ -613,11 +421,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test2" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -641,11 +444,6 @@ async def test_device_tracker_test1_awayfurther_than_test2_first_test2( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -670,23 +468,15 @@ async def test_device_tracker_test1_awayfurther_test2_in_ignored_zone( hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set("device_tracker.test2", "work", {"friendly_name": "test2"}) - await hass.async_block_till_done() - assert await async_setup_component( - hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, - ) + await async_setup_single_entry( + hass, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, + ) hass.states.async_set( "device_tracker.test1", "not_home", @@ -694,11 +484,6 @@ async def test_device_tracker_test1_awayfurther_test2_in_ignored_zone( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -720,29 +505,19 @@ async def test_device_tracker_test1_awayfurther_test2_first( hass: HomeAssistant, config_zones ) -> None: """Test for tracker state.""" - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) - await hass.async_block_till_done() hass.states.async_set( "device_tracker.test2", "not_home", {"friendly_name": "test2"} ) - await hass.async_block_till_done() - assert await async_setup_component( + await async_setup_single_entry( hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) hass.states.async_set( @@ -776,11 +551,6 @@ async def test_device_tracker_test1_awayfurther_test2_first( hass.states.async_set("device_tracker.test1", "work", {"friendly_name": "test1"}) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test2" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -803,7 +573,6 @@ 
async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) -> None: """Test for tracker states.""" await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", {"friendly_name": "test1"} ) @@ -813,18 +582,12 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - assert await async_setup_component( + await async_setup_single_entry( hass, - DOMAIN, - { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "zone": "home", - } - } - }, + "zone.home", + ["device_tracker.test1", "device_tracker.test2"], + ["zone.work"], + 1, ) hass.states.async_set( @@ -834,11 +597,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -862,11 +620,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test2" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test2" @@ -890,11 +643,6 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( ) await hass.async_block_till_done() - # proximity entity - state = hass.states.get("proximity.home") - assert state.attributes.get("nearest") == "test1" - assert state.attributes.get("dir_of_travel") == "unknown" - # sensor entities state = hass.states.get("sensor.home_nearest_device") assert state.state == "test1" @@ -914,22 +662,10 @@ async def test_device_tracker_test1_nearest_after_test2_in_ignored_zone( async def test_nearest_sensors(hass: HomeAssistant, config_zones) -> None: """Test for nearest sensors.""" - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: ["device_tracker.test1", "device_tracker.test2"], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + await async_setup_single_entry( + hass, "zone.home", ["device_tracker.test1", "device_tracker.test2"], [], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - hass.states.async_set( "device_tracker.test1", "not_home", @@ -1038,71 +774,6 @@ async def test_nearest_sensors(hass: HomeAssistant, config_zones) -> None: assert state.state == STATE_UNKNOWN -async def test_create_deprecated_proximity_issue( - hass: HomeAssistant, - issue_registry: ir.IssueRegistry, -) -> None: - """Test we create an issue for deprecated proximity entities used in automations and scripts.""" - assert await async_setup_component( - hass, - automation.DOMAIN, - { - automation.DOMAIN: { - "alias": "test", - "trigger": {"platform": "state", "entity_id": "proximity.home"}, - "action": { - "service": "automation.turn_on", - "target": {"entity_id": "automation.test"}, - }, - } - }, - ) - assert await async_setup_component( - hass, - script.DOMAIN, - { - script.DOMAIN: { - "test": { - "sequence": [ - { - "condition": "state", - "entity_id": 
"proximity.home", - "state": "home", - }, - ], - } - } - }, - ) - config = { - "proximity": { - "home": { - "ignored_zones": ["work"], - "devices": ["device_tracker.test1", "device_tracker.test2"], - "tolerance": "1", - }, - "work": {"tolerance": "1", "zone": "work"}, - } - } - - assert await async_setup_component(hass, DOMAIN, config) - await hass.async_block_till_done() - - automation_entities = automations_with_entity(hass, "proximity.home") - assert len(automation_entities) == 1 - assert automation_entities[0] == "automation.test" - - script_entites = scripts_with_entity(hass, "proximity.home") - - assert len(script_entites) == 1 - assert script_entites[0] == "script.test" - assert issue_registry.async_get_issue(DOMAIN, "deprecated_proximity_entity_home") - - assert not issue_registry.async_get_issue( - DOMAIN, "deprecated_proximity_entity_work" - ) - - async def test_create_removed_tracked_entity_issue( hass: HomeAssistant, issue_registry: ir.IssueRegistry, @@ -1119,22 +790,10 @@ async def test_create_removed_tracked_entity_issue( hass.states.async_set(t1.entity_id, "not_home") hass.states.async_set(t2.entity_id, "not_home") - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: [t1.entity_id, t2.entity_id], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + await async_setup_single_entry( + hass, "zone.home", [t1.entity_id, t2.entity_id], [], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - sensor_t1 = f"sensor.home_{t1.entity_id.split('.')[-1]}_distance" sensor_t2 = f"sensor.home_{t2.entity_id.split('.')[-1]}_distance" @@ -1168,22 +827,10 @@ async def test_track_renamed_tracked_entity( hass.states.async_set(t1.entity_id, "not_home") - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: [t1.entity_id], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + mock_config = await async_setup_single_entry( + hass, "zone.home", [t1.entity_id], ["zone.work"], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - sensor_t1 = f"sensor.home_{t1.entity_id.split('.')[-1]}_distance" entity = entity_registry.async_get(sensor_t1) @@ -1216,31 +863,60 @@ async def test_sensor_unique_ids( hass.states.async_set("device_tracker.test2", "not_home") - mock_config = MockConfigEntry( - domain=DOMAIN, - title="home", - data={ - CONF_ZONE: "zone.home", - CONF_TRACKED_ENTITIES: [t1.entity_id, "device_tracker.test2"], - CONF_IGNORED_ZONES: [], - CONF_TOLERANCE: 1, - }, - unique_id=f"{DOMAIN}_home", + mock_config = await async_setup_single_entry( + hass, "zone.home", [t1.entity_id, "device_tracker.test2"], ["zone.work"], 1 ) - mock_config.add_to_hass(hass) - assert await hass.config_entries.async_setup(mock_config.entry_id) - await hass.async_block_till_done() - sensor_t1 = "sensor.home_test_tracker_1_distance" entity = entity_registry.async_get(sensor_t1) assert entity assert entity.unique_id == f"{mock_config.entry_id}_{t1.id}_dist_to_zone" state = hass.states.get(sensor_t1) - assert state.attributes.get(ATTR_FRIENDLY_NAME) == "home Test tracker 1 Distance" + assert state.attributes.get(ATTR_FRIENDLY_NAME) == "Home Test tracker 1 Distance" entity = entity_registry.async_get("sensor.home_test2_distance") assert entity assert ( 
entity.unique_id == f"{mock_config.entry_id}_device_tracker.test2_dist_to_zone" ) + + +async def test_tracked_zone_is_removed(hass: HomeAssistant) -> None: + """Test that tracked zone is removed.""" + await async_setup_single_entry(hass, "zone.home", ["device_tracker.test1"], [], 1) + + hass.states.async_set( + "device_tracker.test1", + "home", + {"friendly_name": "test1", "latitude": 2.1, "longitude": 1.1}, + ) + await hass.async_block_till_done() + + # check sensor entities + state = hass.states.get("sensor.home_nearest_device") + assert state.state == "test1" + + entity_base_name = "sensor.home_test1" + state = hass.states.get(f"{entity_base_name}_distance") + assert state.state == "0" + state = hass.states.get(f"{entity_base_name}_direction_of_travel") + assert state.state == "arrived" + + # remove tracked zone and move tracked entity + assert hass.states.async_remove("zone.home") + hass.states.async_set( + "device_tracker.test1", + "home", + {"friendly_name": "test1", "latitude": 2.2, "longitude": 1.2}, + ) + await hass.async_block_till_done() + + # check sensor entities + state = hass.states.get("sensor.home_nearest_device") + assert state.state == STATE_UNKNOWN + + entity_base_name = "sensor.home_test1" + state = hass.states.get(f"{entity_base_name}_distance") + assert state.state == STATE_UNAVAILABLE + state = hass.states.get(f"{entity_base_name}_direction_of_travel") + assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/prusalink/conftest.py b/tests/components/prusalink/conftest.py index 104e4d47afa..9bcf45056cd 100644 --- a/tests/components/prusalink/conftest.py +++ b/tests/components/prusalink/conftest.py @@ -1,16 +1,19 @@ """Fixtures for PrusaLink.""" +from collections.abc import Generator +from typing import Any from unittest.mock import patch import pytest from homeassistant.components.prusalink import DOMAIN +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(hass): +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Mock a PrusaLink config entry.""" entry = MockConfigEntry( domain=DOMAIN, @@ -23,7 +26,7 @@ def mock_config_entry(hass): @pytest.fixture -def mock_version_api(hass): +def mock_version_api() -> Generator[dict[str, str]]: """Mock PrusaLink version API.""" resp = { "api": "2.0.0", @@ -36,7 +39,7 @@ def mock_version_api(hass): @pytest.fixture -def mock_info_api(hass): +def mock_info_api() -> Generator[dict[str, Any]]: """Mock PrusaLink info API.""" resp = { "nozzle_diameter": 0.40, @@ -50,7 +53,7 @@ def mock_info_api(hass): @pytest.fixture -def mock_get_legacy_printer(hass): +def mock_get_legacy_printer() -> Generator[dict[str, Any]]: """Mock PrusaLink printer API.""" resp = {"telemetry": {"material": "PLA"}} with patch("pyprusalink.PrusaLink.get_legacy_printer", return_value=resp): @@ -58,7 +61,7 @@ def mock_get_legacy_printer(hass): @pytest.fixture -def mock_get_status_idle(hass): +def mock_get_status_idle() -> Generator[dict[str, Any]]: """Mock PrusaLink printer API.""" resp = { "storage": { @@ -86,7 +89,7 @@ def mock_get_status_idle(hass): @pytest.fixture -def mock_get_status_printing(hass): +def mock_get_status_printing() -> Generator[dict[str, Any]]: """Mock PrusaLink printer API.""" resp = { "job": { @@ -114,7 +117,7 @@ def mock_get_status_printing(hass): @pytest.fixture -def mock_job_api_idle(hass): +def mock_job_api_idle() -> Generator[dict[str, Any]]: """Mock PrusaLink job API having no job.""" resp = {} with patch("pyprusalink.PrusaLink.get_job", 
return_value=resp): @@ -122,7 +125,7 @@ def mock_job_api_idle(hass): @pytest.fixture -def mock_job_api_idle_mk3(hass): +def mock_job_api_idle_mk3() -> Generator[dict[str, Any]]: """Mock PrusaLink job API having a job with idle state (MK3).""" resp = { "id": 129, @@ -148,7 +151,7 @@ def mock_job_api_idle_mk3(hass): @pytest.fixture -def mock_job_api_printing(hass): +def mock_job_api_printing() -> Generator[dict[str, Any]]: """Mock PrusaLink printing.""" resp = { "id": 129, @@ -174,7 +177,9 @@ def mock_job_api_printing(hass): @pytest.fixture -def mock_job_api_paused(hass, mock_get_status_printing, mock_job_api_printing): +def mock_job_api_paused( + mock_get_status_printing: dict[str, Any], mock_job_api_printing: dict[str, Any] +) -> None: """Mock PrusaLink paused printing.""" mock_job_api_printing["state"] = "PAUSED" mock_get_status_printing["printer"]["state"] = "PAUSED" @@ -182,10 +187,10 @@ def mock_job_api_paused(hass, mock_get_status_printing, mock_job_api_printing): @pytest.fixture def mock_api( - mock_version_api, - mock_info_api, - mock_get_legacy_printer, - mock_get_status_idle, - mock_job_api_idle, -): + mock_version_api: dict[str, str], + mock_info_api: dict[str, Any], + mock_get_legacy_printer: dict[str, Any], + mock_get_status_idle: dict[str, Any], + mock_job_api_idle: dict[str, Any], +) -> None: """Mock PrusaLink API.""" diff --git a/tests/components/ps4/test_init.py b/tests/components/ps4/test_init.py index 180f51295ac..3a9aac38646 100644 --- a/tests/components/ps4/test_init.py +++ b/tests/components/ps4/test_init.py @@ -199,7 +199,7 @@ async def test_media_player_is_setup(hass: HomeAssistant) -> None: assert len(hass.data[PS4_DATA].devices) == 1 -async def setup_mock_component(hass): +async def setup_mock_component(hass: HomeAssistant) -> None: """Set up Mock Media Player.""" entry = MockConfigEntry(domain=ps4.DOMAIN, data=MOCK_DATA, version=VERSION) entry.add_to_manager(hass.config_entries) diff --git a/tests/components/ps4/test_media_player.py b/tests/components/ps4/test_media_player.py index e0be9d508fc..5268306c87a 100644 --- a/tests/components/ps4/test_media_player.py +++ b/tests/components/ps4/test_media_player.py @@ -1,5 +1,6 @@ """Tests for the PS4 media player platform.""" +from typing import Any from unittest.mock import MagicMock, patch from pyps4_2ndscreen.credential import get_ddp_message @@ -130,7 +131,9 @@ MOCK_CONFIG = MockConfigEntry(domain=DOMAIN, data=MOCK_DATA, entry_id=MOCK_ENTRY MOCK_LOAD = "homeassistant.components.ps4.media_player.load_games" -async def setup_mock_component(hass, entry=None): +async def setup_mock_component( + hass: HomeAssistant, entry: MockConfigEntry | None = None +) -> str: """Set up Mock Media Player.""" if entry is None: mock_entry = MockConfigEntry( @@ -150,7 +153,9 @@ async def setup_mock_component(hass, entry=None): return mock_entities[0] -async def mock_ddp_response(hass, mock_status_data): +async def mock_ddp_response( + hass: HomeAssistant, mock_status_data: dict[str, Any] +) -> None: """Mock raw UDP response from device.""" mock_protocol = hass.data[PS4_DATA].protocol assert mock_protocol.local_port == DEFAULT_UDP_PORT diff --git a/tests/components/purpleair/conftest.py b/tests/components/purpleair/conftest.py index 1305c98308d..3d6776dd12e 100644 --- a/tests/components/purpleair/conftest.py +++ b/tests/components/purpleair/conftest.py @@ -1,5 +1,7 @@ """Define fixtures for PurpleAir tests.""" +from collections.abc import Generator +from typing import Any from unittest.mock import AsyncMock, Mock, patch from 
aiopurpleair.endpoints.sensors import NearbySensorResult @@ -7,6 +9,7 @@ from aiopurpleair.models.sensors import GetSensorsResponse import pytest from homeassistant.components.purpleair import DOMAIN +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -16,7 +19,7 @@ TEST_SENSOR_INDEX2 = 567890 @pytest.fixture(name="api") -def api_fixture(get_sensors_response): +def api_fixture(get_sensors_response: GetSensorsResponse) -> Mock: """Define a fixture to return a mocked aiopurple API object.""" return Mock( async_check_api_key=AsyncMock(), @@ -34,7 +37,11 @@ def api_fixture(get_sensors_response): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config_entry_data, config_entry_options): +def config_entry_fixture( + hass: HomeAssistant, + config_entry_data: dict[str, Any], + config_entry_options: dict[str, Any], +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -48,7 +55,7 @@ def config_entry_fixture(hass, config_entry_data, config_entry_options): @pytest.fixture(name="config_entry_data") -def config_entry_data_fixture(): +def config_entry_data_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { "api_key": TEST_API_KEY, @@ -56,7 +63,7 @@ def config_entry_data_fixture(): @pytest.fixture(name="config_entry_options") -def config_entry_options_fixture(): +def config_entry_options_fixture() -> dict[str, Any]: """Define a config entry options fixture.""" return { "sensor_indices": [TEST_SENSOR_INDEX1], @@ -64,7 +71,7 @@ def config_entry_options_fixture(): @pytest.fixture(name="get_sensors_response", scope="package") -def get_sensors_response_fixture(): +def get_sensors_response_fixture() -> GetSensorsResponse: """Define a fixture to mock an aiopurpleair GetSensorsResponse object.""" return GetSensorsResponse.parse_raw( load_fixture("get_sensors_response.json", "purpleair") @@ -72,7 +79,7 @@ def get_sensors_response_fixture(): @pytest.fixture(name="mock_aiopurpleair") -async def mock_aiopurpleair_fixture(api): +def mock_aiopurpleair_fixture(api: Mock) -> Generator[Mock]: """Define a fixture to patch aiopurpleair.""" with ( patch("homeassistant.components.purpleair.config_flow.API", return_value=api), @@ -82,7 +89,9 @@ async def mock_aiopurpleair_fixture(api): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_aiopurpleair): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_aiopurpleair: Mock +) -> None: """Define a fixture to set up purpleair.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/purpleair/test_config_flow.py b/tests/components/purpleair/test_config_flow.py index 2345d98b5e1..998cb2b7878 100644 --- a/tests/components/purpleair/test_config_flow.py +++ b/tests/components/purpleair/test_config_flow.py @@ -6,13 +6,15 @@ from aiopurpleair.errors import InvalidApiKeyError, PurpleAirError import pytest from homeassistant.components.purpleair import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import device_registry as dr from .conftest import TEST_API_KEY, TEST_SENSOR_INDEX1, TEST_SENSOR_INDEX2 +from tests.common import MockConfigEntry + TEST_LATITUDE 
= 51.5285582 TEST_LONGITUDE = -0.2416796 @@ -127,19 +129,11 @@ async def test_reauth( mock_aiopurpleair, check_api_key_errors, check_api_key_mock, - config_entry, + config_entry: MockConfigEntry, setup_config_entry, ) -> None: """Test re-auth (including errors).""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data={"api_key": TEST_API_KEY}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/pushover/test_config_flow.py b/tests/components/pushover/test_config_flow.py index 14347084288..58485bfb427 100644 --- a/tests/components/pushover/test_config_flow.py +++ b/tests/components/pushover/test_config_flow.py @@ -149,14 +149,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -180,14 +173,7 @@ async def test_reauth_failed(hass: HomeAssistant, mock_pushover: MagicMock) -> N ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -223,14 +209,7 @@ async def test_reauth_with_existing_config(hass: HomeAssistant) -> None: ) entry2.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG, - ) + result = await entry2.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/pvoutput/test_config_flow.py b/tests/components/pvoutput/test_config_flow.py index 20e99f8e497..fc4335de00d 100644 --- a/tests/components/pvoutput/test_config_flow.py +++ b/tests/components/pvoutput/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock from pvo import PVOutputAuthenticationError, PVOutputConnectionError from homeassistant.components.pvoutput.const import CONF_SYSTEM_ID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -150,15 +150,7 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -192,15 +184,7 @@ async def test_reauth_with_authentication_error( """ mock_config_entry.add_to_hass(hass) - result = 
await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -244,15 +228,7 @@ async def test_reauth_api_error( """Test API error during reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" diff --git a/tests/components/pyload/snapshots/test_sensor.ambr b/tests/components/pyload/snapshots/test_sensor.ambr index c1e5a9d6c3a..69d0387fc8f 100644 --- a/tests/components/pyload/snapshots/test_sensor.ambr +++ b/tests/components/pyload/snapshots/test_sensor.ambr @@ -99,56 +99,6 @@ 'state': 'unavailable', }) # --- -# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_finished_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_finished_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Finished downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_finished_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Finished downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_finished_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -257,6 +207,56 @@ 'state': 'unavailable', }) # --- +# name: test_sensor_update_exceptions[CannotConnect][sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: 
test_sensor_update_exceptions[CannotConnect][sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_active_downloads-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -357,56 +357,6 @@ 'state': 'unavailable', }) # --- -# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_finished_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_finished_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Finished downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_finished_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Finished downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_finished_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -515,6 +465,56 @@ 'state': 'unavailable', }) # --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[InvalidAuth][sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_sensor_update_exceptions[ParserError][sensor.pyload_active_downloads-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -615,56 +615,6 @@ 'state': 'unavailable', }) # --- -# name: test_sensor_update_exceptions[ParserError][sensor.pyload_finished_downloads-entry] - 
EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_finished_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Finished downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: test_sensor_update_exceptions[ParserError][sensor.pyload_finished_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Finished downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_finished_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unavailable', - }) -# --- # name: test_sensor_update_exceptions[ParserError][sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -773,6 +723,56 @@ 'state': 'unavailable', }) # --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_sensor_update_exceptions[ParserError][sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_setup[sensor.pyload_active_downloads-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -873,56 +873,6 @@ 'state': '6', }) # --- -# name: test_setup[sensor.pyload_finished_downloads-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.pyload_finished_downloads', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Finished downloads', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_total', - 'unit_of_measurement': 'downloads', - }) -# --- -# name: 
test_setup[sensor.pyload_finished_downloads-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'pyLoad Finished downloads', - 'state_class': , - 'unit_of_measurement': 'downloads', - }), - 'context': , - 'entity_id': 'sensor.pyload_finished_downloads', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '37', - }) -# --- # name: test_setup[sensor.pyload_free_space-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1031,3 +981,53 @@ 'state': '43.247704', }) # --- +# name: test_setup[sensor.pyload_total_downloads-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.pyload_total_downloads', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Total downloads', + 'platform': 'pyload', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': , + 'unique_id': 'XXXXXXXXXXXXXX_total', + 'unit_of_measurement': 'downloads', + }) +# --- +# name: test_setup[sensor.pyload_total_downloads-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'pyLoad Total downloads', + 'state_class': , + 'unit_of_measurement': 'downloads', + }), + 'context': , + 'entity_id': 'sensor.pyload_total_downloads', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '37', + }) +# --- diff --git a/tests/components/pyload/snapshots/test_switch.ambr b/tests/components/pyload/snapshots/test_switch.ambr index b6465341b0a..0fcc45f8586 100644 --- a/tests/components/pyload/snapshots/test_switch.ambr +++ b/tests/components/pyload/snapshots/test_switch.ambr @@ -93,50 +93,3 @@ 'state': 'on', }) # --- -# name: test_state[switch.pyload_reconnect-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': None, - 'entity_id': 'switch.pyload_reconnect', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Reconnect', - 'platform': 'pyload', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': , - 'unique_id': 'XXXXXXXXXXXXXX_reconnect', - 'unit_of_measurement': None, - }) -# --- -# name: test_state[switch.pyload_reconnect-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'switch', - 'friendly_name': 'pyLoad Reconnect', - }), - 'context': , - 'entity_id': 'switch.pyload_reconnect', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/pyload/test_config_flow.py b/tests/components/pyload/test_config_flow.py index 8c775412371..b4ff63e79f9 100644 --- a/tests/components/pyload/test_config_flow.py +++ b/tests/components/pyload/test_config_flow.py @@ -6,12 +6,7 @@ from pyloadapi.exceptions import CannotConnect, InvalidAuth, ParserError import pytest from homeassistant.components.pyload.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import ( - SOURCE_IMPORT, - SOURCE_REAUTH, - 
SOURCE_RECONFIGURE, - SOURCE_USER, -) +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -180,14 +175,7 @@ async def test_reauth( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -222,14 +210,7 @@ async def test_reauth_errors( config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/radarr/test_config_flow.py b/tests/components/radarr/test_config_flow.py index 407b7b50c48..0ff93536957 100644 --- a/tests/components/radarr/test_config_flow.py +++ b/tests/components/radarr/test_config_flow.py @@ -6,7 +6,7 @@ from aiopyarr import exceptions import pytest from homeassistant.components.radarr.const import DEFAULT_NAME, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -143,15 +143,7 @@ async def test_full_reauth_flow_implementation( ) -> None: """Test the manual reauth flow from start to finish.""" entry = await setup_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/rainforest_eagle/conftest.py b/tests/components/rainforest_eagle/conftest.py index 1aff693e61f..c3790a12e86 100644 --- a/tests/components/rainforest_eagle/conftest.py +++ b/tests/components/rainforest_eagle/conftest.py @@ -1,6 +1,7 @@ """Conftest for rainforest_eagle.""" -from unittest.mock import AsyncMock, Mock, patch +from collections.abc import AsyncGenerator +from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -13,6 +14,7 @@ from homeassistant.components.rainforest_eagle.const import ( TYPE_EAGLE_200, ) from homeassistant.const import CONF_HOST, CONF_TYPE +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from . 
import MOCK_200_RESPONSE_WITHOUT_PRICE, MOCK_CLOUD_ID @@ -21,7 +23,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def config_entry_200(hass): +def config_entry_200(hass: HomeAssistant) -> MockConfigEntry: """Return a config entry.""" entry = MockConfigEntry( domain="rainforest_eagle", @@ -38,7 +40,9 @@ def config_entry_200(hass): @pytest.fixture -async def setup_rainforest_200(hass, config_entry_200): +async def setup_rainforest_200( + hass: HomeAssistant, config_entry_200: MockConfigEntry +) -> AsyncGenerator[Mock]: """Set up rainforest.""" with patch( "aioeagle.ElectricMeter.create_instance", @@ -53,7 +57,7 @@ async def setup_rainforest_200(hass, config_entry_200): @pytest.fixture -async def setup_rainforest_100(hass): +async def setup_rainforest_100(hass: HomeAssistant) -> AsyncGenerator[MagicMock]: """Set up rainforest.""" MockConfigEntry( domain="rainforest_eagle", diff --git a/tests/components/rainmachine/conftest.py b/tests/components/rainmachine/conftest.py index 717d74b421b..22ee807d187 100644 --- a/tests/components/rainmachine/conftest.py +++ b/tests/components/rainmachine/conftest.py @@ -1,5 +1,6 @@ """Define test fixtures for RainMachine.""" +from collections.abc import AsyncGenerator import json from typing import Any from unittest.mock import AsyncMock, patch @@ -8,19 +9,20 @@ import pytest from homeassistant.components.rainmachine import DOMAIN from homeassistant.const import CONF_IP_ADDRESS, CONF_PASSWORD, CONF_PORT, CONF_SSL +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture @pytest.fixture(name="client") -def client_fixture(controller, controller_mac): +def client_fixture(controller: AsyncMock, controller_mac: str) -> AsyncMock: """Define a regenmaschine client.""" return AsyncMock(load_local=AsyncMock(), controllers={controller_mac: controller}) @pytest.fixture(name="config") -def config_fixture(hass): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_IP_ADDRESS: "192.168.1.100", @@ -31,7 +33,9 @@ def config_fixture(hass): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config, controller_mac): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any], controller_mac: str +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -78,7 +82,7 @@ def controller_fixture( @pytest.fixture(name="controller_mac") -def controller_mac_fixture(): +def controller_mac_fixture() -> str: """Define a controller MAC address.""" return "aa:bb:cc:dd:ee:ff" @@ -145,7 +149,9 @@ def data_zones_fixture(): @pytest.fixture(name="setup_rainmachine") -async def setup_rainmachine_fixture(hass, client, config): +async def setup_rainmachine_fixture( + hass: HomeAssistant, client: AsyncMock, config: dict[str, Any] +) -> AsyncGenerator[None]: """Define a fixture to set up RainMachine.""" with ( patch("homeassistant.components.rainmachine.Client", return_value=client), diff --git a/tests/components/recollect_waste/conftest.py b/tests/components/recollect_waste/conftest.py index 360dd8aac98..8384da3f388 100644 --- a/tests/components/recollect_waste/conftest.py +++ b/tests/components/recollect_waste/conftest.py @@ -1,6 +1,7 @@ """Define test fixtures for ReCollect Waste.""" from datetime import date +from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiorecollect.client import PickupEvent, PickupType @@ -11,6 +12,7 @@ from 
homeassistant.components.recollect_waste.const import ( CONF_SERVICE_ID, DOMAIN, ) +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -25,7 +27,9 @@ def client_fixture(pickup_events): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=f"{TEST_PLACE_ID}, {TEST_SERVICE_ID}", data=config @@ -35,7 +39,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_PLACE_ID: TEST_PLACE_ID, @@ -54,7 +58,7 @@ def pickup_events_fixture(): @pytest.fixture(name="mock_aiorecollect") -async def mock_aiorecollect_fixture(client): +def mock_aiorecollect_fixture(client): """Define a fixture to patch aiorecollect.""" with ( patch( @@ -70,7 +74,9 @@ async def mock_aiorecollect_fixture(client): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_aiorecollect): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_aiorecollect: None +) -> None: """Define a fixture to set up recollect_waste.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/recorder/common.py b/tests/components/recorder/common.py index aee35fceb80..18e58d9e572 100644 --- a/tests/components/recorder/common.py +++ b/tests/components/recorder/common.py @@ -265,12 +265,16 @@ def assert_dict_of_states_equal_without_context_and_last_changed( ) -async def async_record_states(hass: HomeAssistant): +async def async_record_states( + hass: HomeAssistant, +) -> tuple[datetime, datetime, dict[str, list[State | None]]]: """Record some test states.""" return await hass.async_add_executor_job(record_states, hass) -def record_states(hass): +def record_states( + hass: HomeAssistant, +) -> tuple[datetime, datetime, dict[str, list[State | None]]]: """Record some test states. We inject a bunch of state updates temperature sensors. 
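The pyload and radarr config-flow tests above, and the renault and reolink ones later in this diff, all replace the hand-rolled `hass.config_entries.flow.async_init(..., context={"source": SOURCE_REAUTH, ...})` call with the `MockConfigEntry.start_reauth_flow(hass)` helper. A minimal sketch of the resulting test shape, assuming a hypothetical `example` domain with empty entry data:

```python
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from tests.common import MockConfigEntry


async def test_reauth_shape(hass: HomeAssistant) -> None:
    """Sketch of a reauth test built on the start_reauth_flow helper."""
    entry = MockConfigEntry(domain="example", data={})  # hypothetical domain and data
    entry.add_to_hass(hass)

    # Replaces the manual flow.async_init(..., context={"source": SOURCE_REAUTH, ...}) call
    result = await entry.start_reauth_flow(hass)

    assert result["type"] is FlowResultType.FORM
    # The integrations touched in this diff confirm reauth in a "reauth_confirm" step
    assert result["step_id"] == "reauth_confirm"
```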
diff --git a/tests/components/recorder/conftest.py b/tests/components/recorder/conftest.py index f562ba163ba..9cdf9dbb372 100644 --- a/tests/components/recorder/conftest.py +++ b/tests/components/recorder/conftest.py @@ -1,12 +1,15 @@ """Fixtures for the recorder component tests.""" -from collections.abc import AsyncGenerator, Generator +from collections.abc import Callable, Generator +from contextlib import contextmanager from dataclasses import dataclass from functools import partial import threading from unittest.mock import Mock, patch import pytest +from sqlalchemy.engine import Engine +from sqlalchemy.orm.session import Session from homeassistant.components import recorder from homeassistant.components.recorder import db_schema @@ -57,31 +60,70 @@ def recorder_dialect_name(hass: HomeAssistant, db_engine: str) -> Generator[None class InstrumentedMigration: """Container to aid controlling migration progress.""" - migration_done: threading.Event + live_migration_done: threading.Event + live_migration_done_stall: threading.Event migration_stall: threading.Event migration_started: threading.Event migration_version: int | None + non_live_migration_done: threading.Event + non_live_migration_done_stall: threading.Event apply_update_mock: Mock + stall_on_schema_version: int | None + apply_update_stalled: threading.Event + apply_update_version: int | None -@pytest.fixture -async def instrument_migration( +@pytest.fixture(name="instrument_migration") +def instrument_migration_fixture( hass: HomeAssistant, -) -> AsyncGenerator[InstrumentedMigration]: +) -> Generator[InstrumentedMigration]: + """Instrument recorder migration.""" + with instrument_migration(hass) as instrumented_migration: + yield instrumented_migration + + +@contextmanager +def instrument_migration( + hass: HomeAssistant, +) -> Generator[InstrumentedMigration]: """Instrument recorder migration.""" real_migrate_schema_live = recorder.migration.migrate_schema_live real_migrate_schema_non_live = recorder.migration.migrate_schema_non_live real_apply_update = recorder.migration._apply_update - def _instrument_migrate_schema(real_func, *args): + def _instrument_migrate_schema_live(real_func, *args): + """Control migration progress and check results.""" + return _instrument_migrate_schema( + real_func, + args, + instrumented_migration.live_migration_done, + instrumented_migration.live_migration_done_stall, + ) + + def _instrument_migrate_schema_non_live(real_func, *args): + """Control migration progress and check results.""" + return _instrument_migrate_schema( + real_func, + args, + instrumented_migration.non_live_migration_done, + instrumented_migration.non_live_migration_done_stall, + ) + + def _instrument_migrate_schema( + real_func, + args, + migration_done: threading.Event, + migration_done_stall: threading.Event, + ): """Control migration progress and check results.""" instrumented_migration.migration_started.set() try: migration_result = real_func(*args) except Exception: - instrumented_migration.migration_done.set() + migration_done.set() + migration_done_stall.wait() raise # Check and report the outcome of the migration; if migration fails @@ -93,22 +135,38 @@ async def instrument_migration( .first() ) instrumented_migration.migration_version = res.schema_version - instrumented_migration.migration_done.set() + migration_done.set() + migration_done_stall.wait() return migration_result - def _instrument_apply_update(*args): + def _instrument_apply_update( + instance: recorder.Recorder, + hass: HomeAssistant, + engine: Engine, + 
session_maker: Callable[[], Session], + new_version: int, + old_version: int, + ): """Control migration progress.""" - instrumented_migration.migration_stall.wait() - real_apply_update(*args) + instrumented_migration.apply_update_version = new_version + stall_version = instrumented_migration.stall_on_schema_version + if stall_version is None or stall_version == new_version: + instrumented_migration.apply_update_stalled.set() + instrumented_migration.migration_stall.wait() + real_apply_update( + instance, hass, engine, session_maker, new_version, old_version + ) with ( patch( "homeassistant.components.recorder.migration.migrate_schema_live", - wraps=partial(_instrument_migrate_schema, real_migrate_schema_live), + wraps=partial(_instrument_migrate_schema_live, real_migrate_schema_live), ), patch( "homeassistant.components.recorder.migration.migrate_schema_non_live", - wraps=partial(_instrument_migrate_schema, real_migrate_schema_non_live), + wraps=partial( + _instrument_migrate_schema_non_live, real_migrate_schema_non_live + ), ), patch( "homeassistant.components.recorder.migration._apply_update", @@ -116,11 +174,19 @@ async def instrument_migration( ) as apply_update_mock, ): instrumented_migration = InstrumentedMigration( - migration_done=threading.Event(), + live_migration_done=threading.Event(), + live_migration_done_stall=threading.Event(), migration_stall=threading.Event(), migration_started=threading.Event(), migration_version=None, + non_live_migration_done=threading.Event(), + non_live_migration_done_stall=threading.Event(), apply_update_mock=apply_update_mock, + stall_on_schema_version=None, + apply_update_stalled=threading.Event(), + apply_update_version=None, ) + instrumented_migration.live_migration_done_stall.set() + instrumented_migration.non_live_migration_done_stall.set() yield instrumented_migration diff --git a/tests/components/recorder/db_schema_16.py b/tests/components/recorder/db_schema_16.py index 24786b1ad44..ffee438f2e9 100644 --- a/tests/components/recorder/db_schema_16.py +++ b/tests/components/recorder/db_schema_16.py @@ -356,7 +356,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_18.py b/tests/components/recorder/db_schema_18.py index db6fbb78f56..09cd41d9e33 100644 --- a/tests/components/recorder/db_schema_18.py +++ b/tests/components/recorder/db_schema_18.py @@ -369,7 +369,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_22.py b/tests/components/recorder/db_schema_22.py index cd0dc52a927..d05cb48ff6f 100644 --- a/tests/components/recorder/db_schema_22.py +++ b/tests/components/recorder/db_schema_22.py @@ -488,7 +488,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_23.py b/tests/components/recorder/db_schema_23.py index 9187d271216..9dffadaa0cc 100644 --- 
a/tests/components/recorder/db_schema_23.py +++ b/tests/components/recorder/db_schema_23.py @@ -478,7 +478,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/db_schema_23_with_newer_columns.py b/tests/components/recorder/db_schema_23_with_newer_columns.py index 9f902523c64..4343f53d00d 100644 --- a/tests/components/recorder/db_schema_23_with_newer_columns.py +++ b/tests/components/recorder/db_schema_23_with_newer_columns.py @@ -602,7 +602,7 @@ class LazyState(State): "_context", ] - def __init__(self, row): # pylint: disable=super-init-not-called + def __init__(self, row) -> None: # pylint: disable=super-init-not-called """Init the lazy state.""" self._row = row self.entity_id = self._row.entity_id diff --git a/tests/components/recorder/test_init.py b/tests/components/recorder/test_init.py index 3cd4c3ab4b6..3bbc78e21ce 100644 --- a/tests/components/recorder/test_init.py +++ b/tests/components/recorder/test_init.py @@ -72,12 +72,13 @@ from homeassistant.const import ( STATE_LOCKED, STATE_UNLOCKED, ) -from homeassistant.core import Context, CoreState, Event, HomeAssistant, callback +from homeassistant.core import Context, CoreState, Event, HomeAssistant, State, callback from homeassistant.helpers import ( entity_registry as er, issue_registry as ir, recorder as recorder_helper, ) +from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util from homeassistant.util.json import json_loads @@ -123,7 +124,7 @@ def small_cache_size() -> Generator[None]: yield -def _default_recorder(hass): +def _default_recorder(hass: HomeAssistant) -> Recorder: """Return a recorder with reasonable defaults.""" return Recorder( hass, @@ -165,11 +166,10 @@ async def test_shutdown_before_startup_finishes( await hass.async_block_till_done() await hass.async_stop() - def _run_information_with_session(): - instance.recorder_and_worker_thread_ids.add(threading.get_ident()) - return run_information_with_session(session) - - run_info = await instance.async_add_executor_job(_run_information_with_session) + # The database executor is shutdown so we must run the + # query in the main thread for testing + instance.recorder_and_worker_thread_ids.add(threading.get_ident()) + run_info = run_information_with_session(session) assert run_info.run_id == 1 assert run_info.start is not None @@ -215,8 +215,7 @@ async def test_shutdown_closes_connections( instance = recorder.get_instance(hass) await instance.async_db_ready await hass.async_block_till_done() - pool = instance.engine.pool - pool.shutdown = Mock() + pool = instance.engine def _ensure_connected(): with session_scope(hass=hass, read_only=True) as session: @@ -224,10 +223,11 @@ async def test_shutdown_closes_connections( await instance.async_add_executor_job(_ensure_connected) - hass.bus.async_fire(EVENT_HOMEASSISTANT_FINAL_WRITE) - await hass.async_block_till_done() + with patch.object(pool, "dispose", wraps=pool.dispose) as dispose: + hass.bus.async_fire(EVENT_HOMEASSISTANT_FINAL_WRITE) + await hass.async_block_till_done() - assert len(pool.shutdown.mock_calls) == 1 + assert len(dispose.mock_calls) == 1 with pytest.raises(RuntimeError): assert instance.get_session() @@ -581,7 +581,7 @@ async def 
test_saving_state_with_commit_interval_zero( assert db_states[0].event_id is None -async def _add_entities(hass, entity_ids): +async def _add_entities(hass: HomeAssistant, entity_ids: list[str]) -> list[State]: """Add entities.""" attributes = {"test_attr": 5, "test_attr_10": "nice"} for idx, entity_id in enumerate(entity_ids): @@ -605,7 +605,7 @@ async def _add_entities(hass, entity_ids): return states -def _state_with_context(hass, entity_id): +def _state_with_context(hass: HomeAssistant, entity_id: str) -> State | None: # We don't restore context unless we need it by joining the # events table on the event_id for state_changed events return hass.states.get(entity_id) @@ -1004,7 +1004,7 @@ async def test_defaults_set(hass: HomeAssistant) -> None: """Test the config defaults are set.""" recorder_config = None - async def mock_setup(hass, config): + async def mock_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" nonlocal recorder_config recorder_config = config["recorder"] @@ -1366,7 +1366,7 @@ async def test_statistics_runs_initiated( @pytest.mark.freeze_time("2022-09-13 09:00:00+02:00") @pytest.mark.parametrize("persistent_database", [True]) -@pytest.mark.parametrize("enable_statistics", [True]) +@pytest.mark.parametrize("enable_missing_statistics", [True]) @pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage async def test_compile_missing_statistics( async_test_recorder: RecorderInstanceGenerator, freezer: FrozenDateTimeFactory @@ -2322,7 +2322,7 @@ async def test_connect_args_priority(hass: HomeAssistant, config_url) -> None: __bases__ = [] _has_events = False - def __init__(*args, **kwargs): ... + def __init__(self, *args: Any, **kwargs: Any) -> None: ... @property def is_async(self): diff --git a/tests/components/recorder/test_migrate.py b/tests/components/recorder/test_migrate.py index 988eade29b6..0e473b702ef 100644 --- a/tests/components/recorder/test_migrate.py +++ b/tests/components/recorder/test_migrate.py @@ -7,7 +7,9 @@ import sys from unittest.mock import ANY, Mock, PropertyMock, call, patch import pytest -from sqlalchemy import create_engine, text +from sqlalchemy import create_engine, inspect, text +from sqlalchemy.engine import Engine +from sqlalchemy.engine.interfaces import ReflectedForeignKeyConstraint from sqlalchemy.exc import ( DatabaseError, InternalError, @@ -27,7 +29,7 @@ from homeassistant.components.recorder.db_schema import ( States, ) from homeassistant.components.recorder.util import session_scope -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, State from homeassistant.helpers import recorder as recorder_helper import homeassistant.util.dt as dt_util @@ -45,7 +47,7 @@ async def mock_recorder_before_hass( """Set up recorder.""" -def _get_native_states(hass, entity_id): +def _get_native_states(hass: HomeAssistant, entity_id: str) -> list[State]: with session_scope(hass=hass, read_only=True) as session: instance = recorder.get_instance(hass) metadata_id = instance.states_meta_manager.get(entity_id, session, True) @@ -199,6 +201,84 @@ async def test_database_migration_failed( assert len(mock_dismiss.mock_calls) == expected_pn_dismiss +@pytest.mark.parametrize( + ( + "patch_version", + "func_to_patch", + "expected_setup_result", + "expected_pn_create", + "expected_pn_dismiss", + ), + [ + # Test error handling in _update_states_table_with_foreign_key_options + (11, "homeassistant.components.recorder.migration.DropConstraint", False, 1, 0), + # Test error handling 
in _modify_columns + (12, "sqlalchemy.engine.base.Connection.execute", False, 1, 0), + # Test error handling in _drop_foreign_key_constraints + (46, "homeassistant.components.recorder.migration.DropConstraint", False, 2, 1), + ], +) +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +async def test_database_migration_failed_non_sqlite( + hass: HomeAssistant, + async_setup_recorder_instance: RecorderInstanceGenerator, + instrument_migration: InstrumentedMigration, + patch_version: int, + func_to_patch: str, + expected_setup_result: bool, + expected_pn_create: int, + expected_pn_dismiss: int, +) -> None: + """Test we notify if the migration fails.""" + assert recorder.util.async_migration_in_progress(hass) is False + instrument_migration.stall_on_schema_version = patch_version + + with ( + patch( + "homeassistant.components.recorder.core.create_engine", + new=create_engine_test, + ), + patch( + "homeassistant.components.persistent_notification.create", + side_effect=pn.create, + ) as mock_create, + patch( + "homeassistant.components.persistent_notification.dismiss", + side_effect=pn.dismiss, + ) as mock_dismiss, + ): + await async_setup_recorder_instance( + hass, + wait_recorder=False, + wait_recorder_setup=False, + expected_setup_result=expected_setup_result, + ) + # Wait for migration to reach the schema version we want to break + await hass.async_add_executor_job( + instrument_migration.apply_update_stalled.wait + ) + + # Make it fail + with patch( + func_to_patch, + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ): + instrument_migration.migration_stall.set() + hass.states.async_set("my.entity", "on", {}) + hass.states.async_set("my.entity", "off", {}) + await hass.async_block_till_done() + await hass.async_add_executor_job(recorder.get_instance(hass).join) + await hass.async_block_till_done() + + assert instrument_migration.apply_update_version == patch_version + assert recorder.util.async_migration_in_progress(hass) is False + assert len(mock_create.mock_calls) == expected_pn_create + assert len(mock_dismiss.mock_calls) == expected_pn_dismiss + + @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") async def test_live_database_migration_encounters_corruption( @@ -553,7 +633,7 @@ async def test_schema_migrate( assert recorder.util.async_migration_is_live(hass) == live instrument_migration.migration_stall.set() await hass.async_block_till_done() - await hass.async_add_executor_job(instrument_migration.migration_done.wait) + await hass.async_add_executor_job(instrument_migration.live_migration_done.wait) await async_wait_recording_done(hass) assert instrument_migration.migration_version == db_schema.SCHEMA_VERSION assert setup_run.called @@ -895,31 +975,40 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: ], } + def find_constraints( + engine: Engine, table: str, column: str + ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: + inspector = inspect(engine) + return [ + (table, column, foreign_key) + for foreign_key in inspector.get_foreign_keys(table) + if foreign_key["name"] and foreign_key["constrained_columns"] == [column] + ] + engine = create_engine(recorder_db_url) db_schema.Base.metadata.create_all(engine) + matching_constraints_1 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_1 == 
expected_dropped_constraints[db_engine] + with Session(engine) as session: session_maker = Mock(return_value=session) - dropped_constraints_1 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in migration._drop_foreign_key_constraints( + for table, column, _, _ in constraints_to_recreate: + migration._drop_foreign_key_constraints( session_maker, engine, table, column - )[1] - ] - assert dropped_constraints_1 == expected_dropped_constraints[db_engine] + ) # Check we don't find the constrained columns again (they are removed) - with Session(engine) as session: - session_maker = Mock(return_value=session) - dropped_constraints_2 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in migration._drop_foreign_key_constraints( - session_maker, engine, table, column - )[1] - ] - assert dropped_constraints_2 == [] + matching_constraints_2 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_2 == [] # Restore the constraints with Session(engine) as session: @@ -929,16 +1018,293 @@ def test_drop_restore_foreign_key_constraints(recorder_db_url: str) -> None: ) # Check we do find the constrained columns again (they are restored) + matching_constraints_3 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_3 == expected_dropped_constraints[db_engine] + + engine.dispose() + + +@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_restore_foreign_key_constraints_twice(recorder_db_url: str) -> None: + """Test we can drop and then restore foreign keys. 
+ + This is not supported on SQLite + """ + + constraints_to_recreate = ( + ("events", "data_id", "event_data", "data_id"), + ("states", "event_id", None, None), # This won't be found + ("states", "old_state_id", "states", "state_id"), + ) + + db_engine = recorder_db_url.partition("://")[0] + + expected_dropped_constraints = { + "mysql": [ + ( + "events", + "data_id", + { + "constrained_columns": ["data_id"], + "name": ANY, + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "constrained_columns": ["old_state_id"], + "name": ANY, + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + "postgresql": [ + ( + "events", + "data_id", + { + "comment": None, + "constrained_columns": ["data_id"], + "name": "events_data_id_fkey", + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "comment": None, + "constrained_columns": ["old_state_id"], + "name": "states_old_state_id_fkey", + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + } + + def find_constraints( + engine: Engine, table: str, column: str + ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: + inspector = inspect(engine) + return [ + (table, column, foreign_key) + for foreign_key in inspector.get_foreign_keys(table) + if foreign_key["name"] and foreign_key["constrained_columns"] == [column] + ] + + engine = create_engine(recorder_db_url) + db_schema.Base.metadata.create_all(engine) + + matching_constraints_1 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_1 == expected_dropped_constraints[db_engine] + with Session(engine) as session: session_maker = Mock(return_value=session) - dropped_constraints_3 = [ - dropped_constraint - for table, column, _, _ in constraints_to_recreate - for dropped_constraint in migration._drop_foreign_key_constraints( + for table, column, _, _ in constraints_to_recreate: + migration._drop_foreign_key_constraints( session_maker, engine, table, column - )[1] + ) + + # Check we don't find the constrained columns again (they are removed) + matching_constraints_2 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_2 == [] + + # Restore the constraints + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_recreate + ) + + # Restore the constraints again + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_recreate + ) + + # Check we do find a single the constrained columns again (they are restored + # only once, even though we called _restore_foreign_key_constraints twice) + matching_constraints_3 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_3 == expected_dropped_constraints[db_engine] + + engine.dispose() + + 
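The `find_constraints` helper introduced above checks the schema through SQLAlchemy's runtime inspection API rather than through the return value of `_drop_foreign_key_constraints`. A self-contained sketch of that API on a throwaway schema (the table and column names here are invented for illustration):

```python
from sqlalchemy import (
    Column,
    ForeignKey,
    Integer,
    MetaData,
    Table,
    create_engine,
    inspect,
)

metadata = MetaData()
parent = Table("parent", metadata, Column("id", Integer, primary_key=True))
child = Table(
    "child",
    metadata,
    Column("id", Integer, primary_key=True),
    Column("parent_id", Integer, ForeignKey("parent.id")),
)

# Assumption: any SQLAlchemy URL works here; an in-memory SQLite DB keeps the sketch local
engine = create_engine("sqlite:///:memory:")
metadata.create_all(engine)

inspector = inspect(engine)
for fk in inspector.get_foreign_keys("child"):
    # Each entry is a dict with "name", "constrained_columns", "referred_table", ...
    print(fk["constrained_columns"], "->", fk["referred_table"], fk["referred_columns"])

engine.dispose()
```

On SQLite the reflected constraint name is typically None, which is consistent with the helper filtering on `foreign_key["name"]` and with these tests being skipped on that engine.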
+@pytest.mark.skip_on_db_engine(["sqlite"]) +@pytest.mark.usefixtures("skip_by_db_engine") +def test_drop_duplicated_foreign_key_constraints(recorder_db_url: str) -> None: + """Test we can drop and then restore foreign keys. + + This is not supported on SQLite + """ + + constraints_to_recreate = ( + ("events", "data_id", "event_data", "data_id"), + ("states", "event_id", None, None), # This won't be found + ("states", "old_state_id", "states", "state_id"), + ) + + db_engine = recorder_db_url.partition("://")[0] + + expected_dropped_constraints = { + "mysql": [ + ( + "events", + "data_id", + { + "constrained_columns": ["data_id"], + "name": ANY, + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "constrained_columns": ["old_state_id"], + "name": ANY, + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + "postgresql": [ + ( + "events", + "data_id", + { + "comment": None, + "constrained_columns": ["data_id"], + "name": ANY, + "options": {}, + "referred_columns": ["data_id"], + "referred_schema": None, + "referred_table": "event_data", + }, + ), + ( + "states", + "old_state_id", + { + "comment": None, + "constrained_columns": ["old_state_id"], + "name": ANY, + "options": {}, + "referred_columns": ["state_id"], + "referred_schema": None, + "referred_table": "states", + }, + ), + ], + } + + def find_constraints( + engine: Engine, table: str, column: str + ) -> list[tuple[str, str, ReflectedForeignKeyConstraint]]: + inspector = inspect(engine) + return [ + (table, column, foreign_key) + for foreign_key in inspector.get_foreign_keys(table) + if foreign_key["name"] and foreign_key["constrained_columns"] == [column] ] - assert dropped_constraints_3 == expected_dropped_constraints[db_engine] + + engine = create_engine(recorder_db_url) + db_schema.Base.metadata.create_all(engine) + + # Create a duplicate of the constraints + inspector = Mock(name="inspector") + inspector.get_foreign_keys = Mock(name="get_foreign_keys", return_value=[]) + with ( + patch( + "homeassistant.components.recorder.migration.sqlalchemy.inspect", + return_value=inspector, + ), + Session(engine) as session, + ): + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_recreate + ) + + matching_constraints_1 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + _expected_dropped_constraints = [ + _dropped_constraint + for dropped_constraint in expected_dropped_constraints[db_engine] + for _dropped_constraint in (dropped_constraint, dropped_constraint) + ] + assert matching_constraints_1 == _expected_dropped_constraints + + with Session(engine) as session: + session_maker = Mock(return_value=session) + for table, column, _, _ in constraints_to_recreate: + migration._drop_foreign_key_constraints( + session_maker, engine, table, column + ) + + # Check we don't find the constrained columns again (they are removed) + matching_constraints_2 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_2 == [] + + # Restore the constraints + with Session(engine) as session: + session_maker = Mock(return_value=session) + migration._restore_foreign_key_constraints( + 
session_maker, engine, constraints_to_recreate + ) + + # Check we again find each constrained column exactly once (the constraints are + # restored only once, even though we called _restore_foreign_key_constraints twice) + matching_constraints_3 = [ + dropped_constraint + for table, column, _, _ in constraints_to_recreate + for dropped_constraint in find_constraints(engine, table, column) + ] + assert matching_constraints_3 == expected_dropped_constraints[db_engine] engine.dispose() @@ -960,11 +1326,15 @@ def test_restore_foreign_key_constraints_with_error( instance = Mock() instance.get_session = Mock(return_value=session) engine = Mock() + inspector = Mock(name="inspector") + inspector.get_foreign_keys = Mock(name="get_foreign_keys", return_value=[]) + engine._sa_instance_state = inspector session_maker = Mock(return_value=session) - migration._restore_foreign_key_constraints( - session_maker, engine, constraints_to_restore - ) + with pytest.raises(InternalError): + migration._restore_foreign_key_constraints( + session_maker, engine, constraints_to_restore + ) assert "Could not update foreign options in events table" in caplog.text @@ -1052,3 +1422,51 @@ def test_delete_foreign_key_violations_unsupported_engine( RuntimeError, match="_delete_foreign_key_violations not supported for sqlite" ): migration._delete_foreign_key_violations(session_maker, engine, "", "", "", "")
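The unsupported-engine test just above and the two added below share the same shape: a `Mock` engine whose `dialect.name` is forced to "sqlite", and a `pytest.raises(RuntimeError, match=...)` around the migration helper. A generic sketch of that pattern, using a stand-in function rather than the real migration helpers:

```python
from unittest.mock import Mock

import pytest


def _dialect_specific_op(engine) -> None:
    """Raise for dialects the operation does not support (stand-in, sketch only)."""
    if engine.dialect.name not in ("mysql", "postgresql"):
        raise RuntimeError(f"operation not supported for {engine.dialect.name}")


def test_guard_sketch() -> None:
    """Exercise the unsupported-engine guard with a mocked engine."""
    engine = Mock()
    engine.dialect.name = "sqlite"  # pretend to be the unsupported dialect
    with pytest.raises(RuntimeError, match="not supported for sqlite"):
        _dialect_specific_op(engine)
```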
+ """ + + connection = Mock() + connection.execute = Mock(side_effect=InternalError(None, None, None)) + session = Mock() + session.connection = Mock(return_value=connection) + instance = Mock() + instance.get_session = Mock(return_value=session) + engine = Mock() + engine.dialect = Mock() + engine.dialect.name = "sqlite" + + session_maker = Mock(return_value=session) + with pytest.raises( + RuntimeError, + match="_update_states_table_with_foreign_key_options not supported for sqlite", + ): + migration._update_states_table_with_foreign_key_options(session_maker, engine) diff --git a/tests/components/recorder/test_purge.py b/tests/components/recorder/test_purge.py index 60ee913cb66..245acf4603d 100644 --- a/tests/components/recorder/test_purge.py +++ b/tests/components/recorder/test_purge.py @@ -1330,7 +1330,9 @@ async def test_purge_filtered_events_state_changed( async def test_purge_entities(hass: HomeAssistant, recorder_mock: Recorder) -> None: """Test purging of specific entities.""" - async def _purge_entities(hass, entity_ids, domains, entity_globs): + async def _purge_entities( + hass: HomeAssistant, entity_ids: str, domains: str, entity_globs: str + ) -> None: service_data = { "entity_id": entity_ids, "domains": domains, diff --git a/tests/components/recorder/test_statistics_v23_migration.py b/tests/components/recorder/test_statistics_v23_migration.py index dfa87fc9391..53c59635e8c 100644 --- a/tests/components/recorder/test_statistics_v23_migration.py +++ b/tests/components/recorder/test_statistics_v23_migration.py @@ -17,18 +17,17 @@ import pytest from homeassistant.components import recorder from homeassistant.components.recorder import get_instance from homeassistant.components.recorder.util import session_scope -from homeassistant.helpers import recorder as recorder_helper -from homeassistant.setup import setup_component import homeassistant.util.dt as dt_util from .common import ( CREATE_ENGINE_TARGET, + async_wait_recording_done, create_engine_test_for_schema_version_postfix, get_schema_module_path, - wait_recording_done, ) -from tests.common import get_test_home_assistant +from tests.common import async_test_home_assistant +from tests.typing import RecorderInstanceGenerator SCHEMA_VERSION_POSTFIX = "23_with_newer_columns" SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX) @@ -37,8 +36,8 @@ SCHEMA_MODULE = get_schema_module_path(SCHEMA_VERSION_POSTFIX) @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) -def test_delete_duplicates( - recorder_db_url: str, caplog: pytest.LogCaptureFixture +async def test_delete_duplicates( + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test removal of duplicated statistics. 
@@ -176,42 +175,42 @@ def test_delete_duplicates( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), - get_test_home_assistant() as hass, ): - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - wait_recording_done(hass) - wait_recording_done(hass) + async with async_test_home_assistant() as hass, async_test_recorder(hass): + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) - ) - with session_scope(hass=hass) as session: - for stat in external_energy_statistics_1: - session.add(recorder.db_schema.Statistics.from_stats(1, stat)) - for stat in external_energy_statistics_2: - session.add(recorder.db_schema.Statistics.from_stats(2, stat)) - for stat in external_co2_statistics: - session.add(recorder.db_schema.Statistics.from_stats(3, stat)) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_1 + ) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_2 + ) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) + ) + with session_scope(hass=hass) as session: + for stat in external_energy_statistics_1: + session.add(recorder.db_schema.Statistics.from_stats(1, stat)) + for stat in external_energy_statistics_2: + session.add(recorder.db_schema.Statistics.from_stats(2, stat)) + for stat in external_co2_statistics: + session.add(recorder.db_schema.Statistics.from_stats(3, stat)) - hass.stop() + await hass.async_stop() # Test that the duplicates are removed during migration from schema 23 - with get_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) - hass.stop() + async with async_test_home_assistant() as hass, async_test_recorder(hass): + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + await hass.async_stop() assert "Deleted 2 duplicated statistics rows" in caplog.text assert "Found non identical" not in caplog.text @@ -221,8 +220,8 @@ def test_delete_duplicates( @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) -def test_delete_duplicates_many( - recorder_db_url: str, caplog: pytest.LogCaptureFixture +async def test_delete_duplicates_many( + async_test_recorder: RecorderInstanceGenerator, caplog: pytest.LogCaptureFixture ) -> None: """Test removal of duplicated statistics. 
@@ -360,48 +359,48 @@ def test_delete_duplicates_many( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), - get_test_home_assistant() as hass, ): - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - wait_recording_done(hass) - wait_recording_done(hass) + async with async_test_home_assistant() as hass, async_test_recorder(hass): + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) - ) - with session_scope(hass=hass) as session: - for stat in external_energy_statistics_1: - session.add(recorder.db_schema.Statistics.from_stats(1, stat)) - for _ in range(3000): + with session_scope(hass=hass) as session: session.add( - recorder.db_schema.Statistics.from_stats( - 1, external_energy_statistics_1[-1] + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_1 ) ) - for stat in external_energy_statistics_2: - session.add(recorder.db_schema.Statistics.from_stats(2, stat)) - for stat in external_co2_statistics: - session.add(recorder.db_schema.Statistics.from_stats(3, stat)) + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_2 + ) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta(external_co2_metadata) + ) + with session_scope(hass=hass) as session: + for stat in external_energy_statistics_1: + session.add(recorder.db_schema.Statistics.from_stats(1, stat)) + for _ in range(3000): + session.add( + recorder.db_schema.Statistics.from_stats( + 1, external_energy_statistics_1[-1] + ) + ) + for stat in external_energy_statistics_2: + session.add(recorder.db_schema.Statistics.from_stats(2, stat)) + for stat in external_co2_statistics: + session.add(recorder.db_schema.Statistics.from_stats(3, stat)) - hass.stop() + await hass.async_stop() # Test that the duplicates are removed during migration from schema 23 - with get_test_home_assistant() as hass: - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) - hass.stop() + async with async_test_home_assistant() as hass, async_test_recorder(hass): + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + await hass.async_stop() assert "Deleted 3002 duplicated statistics rows" in caplog.text assert "Found non identical" not in caplog.text @@ -412,8 +411,10 @@ def test_delete_duplicates_many( @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") @pytest.mark.parametrize("persistent_database", [True]) -def test_delete_duplicates_non_identical( - recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path +async def test_delete_duplicates_non_identical( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + tmp_path: Path, ) -> None: """Test removal of duplicated statistics. 
@@ -521,38 +522,40 @@ def test_delete_duplicates_non_identical( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), - get_test_home_assistant() as hass, ): - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - wait_recording_done(hass) - wait_recording_done(hass) + async with async_test_home_assistant() as hass, async_test_recorder(hass): + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) - ) - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_2) - ) - with session_scope(hass=hass) as session: - for stat in external_energy_statistics_1: - session.add(recorder.db_schema.Statistics.from_stats(1, stat)) - for stat in external_energy_statistics_2: - session.add(recorder.db_schema.Statistics.from_stats(2, stat)) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_1 + ) + ) + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_2 + ) + ) + with session_scope(hass=hass) as session: + for stat in external_energy_statistics_1: + session.add(recorder.db_schema.Statistics.from_stats(1, stat)) + for stat in external_energy_statistics_2: + session.add(recorder.db_schema.Statistics.from_stats(2, stat)) - hass.stop() + await hass.async_stop() # Test that the duplicates are removed during migration from schema 23 - with get_test_home_assistant() as hass: - hass.config.config_dir = tmp_path - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) - hass.stop() + async with ( + async_test_home_assistant(config_dir=tmp_path) as hass, + async_test_recorder(hass), + ): + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + await hass.async_stop() assert "Deleted 2 duplicated statistics rows" in caplog.text assert "Deleted 1 non identical" in caplog.text @@ -561,8 +564,11 @@ def test_delete_duplicates_non_identical( isotime = dt_util.utcnow().isoformat() backup_file_name = f".storage/deleted_statistics.{isotime}.json" - with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file: - backup = json.load(backup_file) + def read_backup(): + with open(hass.config.path(backup_file_name), encoding="utf8") as backup_file: + return json.load(backup_file) + + backup = await hass.async_add_executor_job(read_backup) assert backup == [ { @@ -597,8 +603,10 @@ def test_delete_duplicates_non_identical( @pytest.mark.parametrize("persistent_database", [True]) @pytest.mark.skip_on_db_engine(["mysql", "postgresql"]) @pytest.mark.usefixtures("skip_by_db_engine") -def test_delete_duplicates_short_term( - recorder_db_url: str, caplog: pytest.LogCaptureFixture, tmp_path: Path +async def test_delete_duplicates_short_term( + async_test_recorder: RecorderInstanceGenerator, + caplog: pytest.LogCaptureFixture, + tmp_path: Path, ) -> None: """Test removal of duplicated statistics. 
@@ -637,37 +645,37 @@ def test_delete_duplicates_short_term( schema_version_postfix=SCHEMA_VERSION_POSTFIX, ), ), - get_test_home_assistant() as hass, ): - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - wait_recording_done(hass) - wait_recording_done(hass) + async with async_test_home_assistant() as hass, async_test_recorder(hass): + get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsMeta.from_meta(external_energy_metadata_1) - ) - with session_scope(hass=hass) as session: - session.add( - recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) - ) - session.add( - recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) - ) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsMeta.from_meta( + external_energy_metadata_1 + ) + ) + with session_scope(hass=hass) as session: + session.add( + recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) + ) + session.add( + recorder.db_schema.StatisticsShortTerm.from_stats(1, statistic_row) + ) - hass.stop() + await hass.async_stop() # Test that the duplicates are removed during migration from schema 23 - with get_test_home_assistant() as hass: - hass.config.config_dir = tmp_path - recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "recorder", {"recorder": {"db_url": recorder_db_url}}) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) - hass.stop() + async with ( + async_test_home_assistant(config_dir=tmp_path) as hass, + async_test_recorder(hass), + ): + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) + await hass.async_stop() assert "duplicated statistics rows" not in caplog.text assert "Found non identical" not in caplog.text diff --git a/tests/components/recorder/test_util.py b/tests/components/recorder/test_util.py index 04fe762c780..d850778d214 100644 --- a/tests/components/recorder/test_util.py +++ b/tests/components/recorder/test_util.py @@ -990,7 +990,7 @@ async def test_execute_stmt_lambda_element( all_calls = 0 class MockExecutor: - def __init__(self, stmt): + def __init__(self, stmt) -> None: assert isinstance(stmt, StatementLambdaElement) def all(self): diff --git a/tests/components/recorder/test_v32_migration.py b/tests/components/recorder/test_v32_migration.py index 5266e55851c..1006a03f4ec 100644 --- a/tests/components/recorder/test_v32_migration.py +++ b/tests/components/recorder/test_v32_migration.py @@ -19,6 +19,7 @@ from homeassistant.core import Event, EventOrigin, State import homeassistant.util.dt as dt_util from .common import async_wait_recording_done +from .conftest import instrument_migration from tests.common import async_test_home_assistant from tests.typing import RecorderInstanceGenerator @@ -637,6 +638,10 @@ async def test_out_of_disk_space_while_removing_foreign_key( This case tests the migration still happens if ix_states_event_id is removed from the states table. + + Note that the test is somewhat forced; the states.event_id foreign key constraint is + removed when migrating to schema version 46, inspecting the schema in + cleanup_legacy_states_event_ids is not likely to fail. 
""" importlib.import_module(SCHEMA_MODULE) old_db_schema = sys.modules[SCHEMA_MODULE] @@ -737,36 +742,52 @@ async def test_out_of_disk_space_while_removing_foreign_key( assert "ix_states_entity_id_last_updated_ts" in states_index_names - # Simulate out of disk space while removing the foreign key from the states table by - # - patching DropConstraint to raise InternalError for MySQL and PostgreSQL - with ( - patch( - "homeassistant.components.recorder.migration.DropConstraint", - side_effect=OperationalError( - None, None, OSError("No space left on device") - ), - ), - ): - async with ( - async_test_home_assistant() as hass, - async_test_recorder(hass) as instance, - ): - await hass.async_block_till_done() + async with async_test_home_assistant() as hass: + with instrument_migration(hass) as instrumented_migration: + # Allow migration to start, but stall when live migration is completed + instrumented_migration.migration_stall.set() + instrumented_migration.live_migration_done_stall.clear() - # We need to wait for all the migration tasks to complete - # before we can check the database. - for _ in range(number_of_migrations): - await instance.async_block_till_done() - await async_wait_recording_done(hass) + async with async_test_recorder(hass, wait_recorder=False) as instance: + await hass.async_block_till_done() - states_indexes = await instance.async_add_executor_job( - _get_states_index_names - ) - states_index_names = {index["name"] for index in states_indexes} - assert instance.use_legacy_events_index is True - assert await instance.async_add_executor_job(_get_event_id_foreign_keys) + # Wait for live migration to complete + await hass.async_add_executor_job( + instrumented_migration.live_migration_done.wait + ) - await hass.async_stop() + # Simulate out of disk space while removing the foreign key from the states table by + # - patching DropConstraint to raise InternalError for MySQL and PostgreSQL + with ( + patch( + "homeassistant.components.recorder.migration.sqlalchemy.inspect", + side_effect=OperationalError( + None, None, OSError("No space left on device") + ), + ), + ): + instrumented_migration.live_migration_done_stall.set() + # We need to wait for all the migration tasks to complete + # before we can check the database. 
+ for _ in range(number_of_migrations): + await instance.async_block_till_done() + await async_wait_recording_done(hass) + + states_indexes = await instance.async_add_executor_job( + _get_states_index_names + ) + states_index_names = {index["name"] for index in states_indexes} + assert instance.use_legacy_events_index is True + # The states.event_id foreign key constraint was removed when + # migration to schema version 46 + assert ( + await instance.async_add_executor_job( + _get_event_id_foreign_keys + ) + is None + ) + + await hass.async_stop() # Now run it again to verify the table rebuild tries again caplog.clear() diff --git a/tests/components/reddit/test_sensor.py b/tests/components/reddit/test_sensor.py index 52dac07d621..98cf2b79db3 100644 --- a/tests/components/reddit/test_sensor.py +++ b/tests/components/reddit/test_sensor.py @@ -66,7 +66,7 @@ INVALID_SORT_BY_CONFIG = { class ObjectView: """Use dict properties as attributes.""" - def __init__(self, d): + def __init__(self, d) -> None: """Set dict as internal dict.""" self.__dict__ = d diff --git a/tests/components/renault/test_config_flow.py b/tests/components/renault/test_config_flow.py index 7d40cf69314..69bfdf0842e 100644 --- a/tests/components/renault/test_config_flow.py +++ b/tests/components/renault/test_config_flow.py @@ -13,15 +13,12 @@ from homeassistant.components.renault.const import ( CONF_LOCALE, DOMAIN, ) -from homeassistant.config_entries import ConfigEntry from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import aiohttp_client -from .const import MOCK_CONFIG - -from tests.common import load_fixture +from tests.common import MockConfigEntry, load_fixture pytestmark = pytest.mark.usefixtures("mock_setup_entry") @@ -220,19 +217,11 @@ async def test_config_flow_duplicate( assert len(mock_setup_entry.mock_calls) == 0 -async def test_reauth(hass: HomeAssistant, config_entry: ConfigEntry) -> None: +async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test the start of the config flow.""" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "unique_id": config_entry.unique_id, - }, - data=MOCK_CONFIG, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["description_placeholders"] == {CONF_USERNAME: "email@test.com"} diff --git a/tests/components/reolink/conftest.py b/tests/components/reolink/conftest.py index c74cac76192..ddea36cb292 100644 --- a/tests/components/reolink/conftest.py +++ b/tests/components/reolink/conftest.py @@ -4,6 +4,7 @@ from collections.abc import Generator from unittest.mock import AsyncMock, MagicMock, patch import pytest +from reolink_aio.api import Chime from homeassistant.components.reolink import const from homeassistant.components.reolink.config_flow import DEFAULT_PROTOCOL @@ -52,10 +53,6 @@ def mock_setup_entry() -> Generator[AsyncMock]: def reolink_connect_class() -> Generator[MagicMock]: """Mock reolink connection and return both the host_mock and host_mock_class.""" with ( - patch( - "homeassistant.components.reolink.host.webhook.async_register", - return_value=True, - ), patch( "homeassistant.components.reolink.host.Host", autospec=True ) as host_mock_class, @@ -107,6 +104,14 @@ def 
reolink_connect_class() -> Generator[MagicMock]: host_mock.capabilities = {"Host": ["RTSP"], "0": ["motion_detection"]} host_mock.checked_api_versions = {"GetEvents": 1} host_mock.abilities = {"abilityChn": [{"aiTrack": {"permit": 0, "ver": 0}}]} + + # enums + host_mock.whiteled_mode.return_value = 1 + host_mock.whiteled_mode_list.return_value = ["off", "auto"] + host_mock.doorbell_led.return_value = "Off" + host_mock.doorbell_led_list.return_value = ["stayoff", "auto"] + host_mock.auto_track_method.return_value = 3 + host_mock.daynight_state.return_value = "Black&White" yield host_mock_class @@ -145,3 +150,26 @@ def config_entry(hass: HomeAssistant) -> MockConfigEntry: ) config_entry.add_to_hass(hass) return config_entry + + +@pytest.fixture +def test_chime(reolink_connect: MagicMock) -> None: + """Mock a reolink chime.""" + TEST_CHIME = Chime( + host=reolink_connect, + dev_id=12345678, + channel=0, + ) + TEST_CHIME.name = "Test chime" + TEST_CHIME.volume = 3 + TEST_CHIME.connect_state = 2 + TEST_CHIME.led_state = True + TEST_CHIME.event_info = { + "md": {"switch": 0, "musicId": 0}, + "people": {"switch": 0, "musicId": 1}, + "visitor": {"switch": 1, "musicId": 2}, + } + + reolink_connect.chime_list = [TEST_CHIME] + reolink_connect.chime.return_value = TEST_CHIME + return TEST_CHIME diff --git a/tests/components/reolink/test_binary_sensor.py b/tests/components/reolink/test_binary_sensor.py new file mode 100644 index 00000000000..e02742afe1d --- /dev/null +++ b/tests/components/reolink/test_binary_sensor.py @@ -0,0 +1,52 @@ +"""Test the Reolink binary sensor platform.""" + +from unittest.mock import MagicMock, patch + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.components.reolink import DEVICE_UPDATE_INTERVAL, const +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_OFF, STATE_ON, Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .conftest import TEST_NVR_NAME, TEST_UID + +from tests.common import MockConfigEntry, async_fire_time_changed +from tests.typing import ClientSessionGenerator + + +async def test_motion_sensor( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + freezer: FrozenDateTimeFactory, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test binary sensor entity with motion sensor.""" + reolink_connect.model = "Reolink Duo PoE" + reolink_connect.motion_detected.return_value = True + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.BINARY_SENSOR]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.BINARY_SENSOR}.{TEST_NVR_NAME}_motion_lens_0" + assert hass.states.get(entity_id).state == STATE_ON + + reolink_connect.motion_detected.return_value = False + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_OFF + + # test webhook callback + reolink_connect.motion_detected.return_value = True + reolink_connect.ONVIF_event_callback.return_value = [0] + webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + client = await hass_client_no_auth() + await client.post(f"/api/webhook/{webhook_id}", data="test_data") + + assert hass.states.get(entity_id).state == STATE_ON diff --git 
a/tests/components/reolink/test_config_flow.py b/tests/components/reolink/test_config_flow.py index 6e57a7924e7..926baf324bc 100644 --- a/tests/components/reolink/test_config_flow.py +++ b/tests/components/reolink/test_config_flow.py @@ -1,10 +1,10 @@ """Test the Reolink config flow.""" -from datetime import timedelta import json from typing import Any from unittest.mock import AsyncMock, MagicMock, call +from freezegun.api import FrozenDateTimeFactory import pytest from reolink_aio.exceptions import ApiError, CredentialsInvalidError, ReolinkError @@ -25,7 +25,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers.device_registry import format_mac -from homeassistant.util.dt import utcnow from .conftest import ( DHCP_FORMATTED_MAC, @@ -328,16 +327,7 @@ async def test_reauth(hass: HomeAssistant, mock_setup_entry: MagicMock) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - const.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - "title_placeholders": {"name": TEST_NVR_NAME}, - "unique_id": format_mac(TEST_MAC), - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -439,6 +429,7 @@ async def test_dhcp_flow(hass: HomeAssistant, mock_setup_entry: MagicMock) -> No ) async def test_dhcp_ip_update( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, reolink_connect_class: MagicMock, reolink_connect: MagicMock, last_update_success: bool, @@ -471,10 +462,9 @@ async def test_dhcp_ip_update( if not last_update_success: # ensure the last_update_succes is False for the device_coordinator. 
- reolink_connect.get_states = AsyncMock(side_effect=ReolinkError("Test error")) - async_fire_time_changed( - hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(minutes=1) - ) + reolink_connect.get_states.side_effect = ReolinkError("Test error") + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() dhcp_data = dhcp.DhcpServiceInfo( diff --git a/tests/components/reolink/test_host.py b/tests/components/reolink/test_host.py new file mode 100644 index 00000000000..c4096a4582f --- /dev/null +++ b/tests/components/reolink/test_host.py @@ -0,0 +1,83 @@ +"""Test the Reolink host.""" + +from asyncio import CancelledError +from unittest.mock import AsyncMock, MagicMock + +from aiohttp import ClientResponseError +import pytest + +from homeassistant.components.reolink import const +from homeassistant.components.webhook import async_handle_webhook +from homeassistant.config_entries import ConfigEntryState +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er +from homeassistant.helpers.dispatcher import async_dispatcher_connect +from homeassistant.util.aiohttp import MockRequest + +from .conftest import TEST_UID + +from tests.common import MockConfigEntry +from tests.typing import ClientSessionGenerator + + +async def test_webhook_callback( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + entity_registry: er.EntityRegistry, +) -> None: + """Test webhook callback with motion sensor.""" + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + webhook_id = f"{const.DOMAIN}_{TEST_UID.replace(':', '')}_ONVIF" + + signal_all = MagicMock() + signal_ch = MagicMock() + async_dispatcher_connect(hass, f"{webhook_id}_all", signal_all) + async_dispatcher_connect(hass, f"{webhook_id}_0", signal_ch) + + client = await hass_client_no_auth() + + # test webhook callback success all channels + reolink_connect.ONVIF_event_callback.return_value = None + await client.post(f"/api/webhook/{webhook_id}") + signal_all.assert_called_once() + + # test webhook callback all channels with failure to read motion_state + signal_all.reset_mock() + reolink_connect.get_motion_state_all_ch.return_value = False + await client.post(f"/api/webhook/{webhook_id}") + signal_all.assert_not_called() + + # test webhook callback success single channel + reolink_connect.ONVIF_event_callback.return_value = [0] + await client.post(f"/api/webhook/{webhook_id}", data="test_data") + signal_ch.assert_called_once() + + # test webhook callback single channel with error in event callback + signal_ch.reset_mock() + reolink_connect.ONVIF_event_callback.side_effect = Exception("Test error") + await client.post(f"/api/webhook/{webhook_id}", data="test_data") + signal_ch.assert_not_called() + + # test failure to read date from webhook post + request = MockRequest( + method="POST", + content=bytes("test", "utf-8"), + mock_source="test", + ) + request.read = AsyncMock(side_effect=ConnectionResetError("Test error")) + await async_handle_webhook(hass, webhook_id, request) + signal_all.assert_not_called() + + request.read = AsyncMock(side_effect=ClientResponseError("Test error", "Test")) + await async_handle_webhook(hass, webhook_id, request) + signal_all.assert_not_called() + + request.read = AsyncMock(side_effect=CancelledError("Test error")) + with 
pytest.raises(CancelledError): + await async_handle_webhook(hass, webhook_id, request) + signal_all.assert_not_called() diff --git a/tests/components/reolink/test_init.py b/tests/components/reolink/test_init.py index 4f745530b6b..fd54f298966 100644 --- a/tests/components/reolink/test_init.py +++ b/tests/components/reolink/test_init.py @@ -1,11 +1,12 @@ """Test the Reolink init.""" import asyncio -from datetime import timedelta from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch +from freezegun.api import FrozenDateTimeFactory import pytest +from reolink_aio.api import Chime from reolink_aio.exceptions import CredentialsInvalidError, ReolinkError from homeassistant.components.reolink import ( @@ -24,7 +25,6 @@ from homeassistant.helpers import ( issue_registry as ir, ) from homeassistant.setup import async_setup_component -from homeassistant.util.dt import utcnow from .conftest import ( TEST_CAM_MODEL, @@ -40,8 +40,10 @@ from tests.typing import WebSocketGenerator pytestmark = pytest.mark.usefixtures("reolink_connect", "reolink_platforms") +CHIME_MODEL = "Reolink Chime" -async def test_wait(*args, **key_args): + +async def test_wait(*args, **key_args) -> None: """Ensure a mocked function takes a bit of time to be able to timeout in test.""" await asyncio.sleep(0) @@ -101,52 +103,48 @@ async def test_failures_parametrized( async def test_firmware_error_twice( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, reolink_connect: MagicMock, config_entry: MockConfigEntry, ) -> None: """Test when the firmware update fails 2 times.""" - reolink_connect.check_new_firmware = AsyncMock( - side_effect=ReolinkError("Test error") - ) + reolink_connect.check_new_firmware.side_effect = ReolinkError("Test error") with patch("homeassistant.components.reolink.PLATFORMS", [Platform.UPDATE]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.UPDATE}.{TEST_NVR_NAME}_firmware" - assert hass.states.is_state(entity_id, STATE_OFF) + assert hass.states.get(entity_id).state == STATE_OFF - async_fire_time_changed( - hass, utcnow() + FIRMWARE_UPDATE_INTERVAL + timedelta(minutes=1) - ) + freezer.tick(FIRMWARE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.is_state(entity_id, STATE_UNAVAILABLE) + assert hass.states.get(entity_id).state == STATE_UNAVAILABLE async def test_credential_error_three( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, reolink_connect: MagicMock, config_entry: MockConfigEntry, issue_registry: ir.IssueRegistry, ) -> None: """Test when the update gives credential error 3 times.""" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED - reolink_connect.get_states = AsyncMock( - side_effect=CredentialsInvalidError("Test error") - ) + reolink_connect.get_states.side_effect = CredentialsInvalidError("Test error") issue_id = f"config_entry_reauth_{const.DOMAIN}_{config_entry.entry_id}" for _ in range(NUM_CRED_ERRORS): assert (HOMEASSISTANT_DOMAIN, issue_id) not in issue_registry.issues - async_fire_time_changed( - hass, utcnow() + 
DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) - ) + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() assert (HOMEASSISTANT_DOMAIN, issue_id) in issue_registry.issues @@ -224,13 +222,9 @@ async def test_removing_disconnected_cams( device_models = [device.model for device in device_entries] assert sorted(device_models) == sorted([TEST_HOST_MODEL, TEST_CAM_MODEL]) - # reload integration after 'disconnecting' a camera. + # Try to remove the device after 'disconnecting' a camera. if attr is not None: setattr(reolink_connect, attr, value) - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): - assert await hass.config_entries.async_reload(config_entry.entry_id) - await hass.async_block_till_done() - expected_success = TEST_CAM_MODEL not in expected_models for device in device_entries: if device.model == TEST_CAM_MODEL: @@ -244,6 +238,79 @@ async def test_removing_disconnected_cams( assert sorted(device_models) == sorted(expected_models) +@pytest.mark.parametrize( + ("attr", "value", "expected_models"), + [ + ( + None, + None, + [TEST_HOST_MODEL, TEST_CAM_MODEL, CHIME_MODEL], + ), + ( + "connect_state", + -1, + [TEST_HOST_MODEL, TEST_CAM_MODEL], + ), + ( + "remove", + -1, + [TEST_HOST_MODEL, TEST_CAM_MODEL], + ), + ], +) +async def test_removing_chime( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + attr: str | None, + value: Any, + expected_models: list[str], +) -> None: + """Test removing a chime.""" + reolink_connect.channels = [0] + assert await async_setup_component(hass, "config", {}) + client = await hass_ws_client(hass) + # setup CH 0 and NVR switch entities/device + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SWITCH]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + device_models = [device.model for device in device_entries] + assert sorted(device_models) == sorted( + [TEST_HOST_MODEL, TEST_CAM_MODEL, CHIME_MODEL] + ) + + if attr == "remove": + + async def test_remove_chime(*args, **key_args): + """Remove chime.""" + test_chime.connect_state = -1 + + test_chime.remove = test_remove_chime + elif attr is not None: + setattr(test_chime, attr, value) + + # Try to remove the device after 'disconnecting' a chime. 
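+    # Removal should only succeed when the chime is expected to be gone from the device list.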
+ expected_success = CHIME_MODEL not in expected_models + for device in device_entries: + if device.model == CHIME_MODEL: + response = await client.remove_device(device.id, config_entry.entry_id) + assert response["success"] == expected_success + + device_entries = dr.async_entries_for_config_entry( + device_registry, config_entry.entry_id + ) + device_models = [device.model for device in device_entries] + assert sorted(device_models) == sorted(expected_models) + + @pytest.mark.parametrize( ( "original_id", @@ -478,7 +545,7 @@ async def test_port_repair_issue( issue_registry: ir.IssueRegistry, ) -> None: """Test repairs issue is raised when auto enable of ports fails.""" - reolink_connect.set_net_port = AsyncMock(side_effect=ReolinkError("Test error")) + reolink_connect.set_net_port.side_effect = ReolinkError("Test error") reolink_connect.onvif_enabled = False reolink_connect.rtsp_enabled = False reolink_connect.rtmp_enabled = False diff --git a/tests/components/reolink/test_media_source.py b/tests/components/reolink/test_media_source.py index 66ed32ca823..31985bd10f7 100644 --- a/tests/components/reolink/test_media_source.py +++ b/tests/components/reolink/test_media_source.py @@ -130,7 +130,7 @@ async def test_browsing( ) -> None: """Test browsing the Reolink three.""" entry_id = config_entry.entry_id - reolink_connect.api_version.return_value = 1 + reolink_connect.supported.return_value = 1 reolink_connect.model = "Reolink TrackMix PoE" with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): @@ -162,7 +162,7 @@ async def test_browsing( browse_res_AT_sub_id = f"RES|{entry_id}|{TEST_CHANNEL}|autotrack_sub" browse_res_AT_main_id = f"RES|{entry_id}|{TEST_CHANNEL}|autotrack_main" assert browse.domain == DOMAIN - assert browse.title == TEST_NVR_NAME + assert browse.title == f"{TEST_NVR_NAME} lens 0" assert browse.identifier == browse_resolution_id assert browse.children[0].identifier == browse_res_sub_id assert browse.children[1].identifier == browse_res_main_id @@ -178,19 +178,19 @@ async def test_browsing( browse = await async_browse_media(hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_sub_id}") assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} Low res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 Low res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_AT_sub_id}" ) assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} Autotrack low res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 Autotrack low res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_AT_main_id}" ) assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} Autotrack high res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 Autotrack high res." browse = await async_browse_media( hass, f"{URI_SCHEME}{DOMAIN}/{browse_res_main_id}" @@ -200,7 +200,7 @@ async def test_browsing( browse_day_0_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY}" browse_day_1_id = f"DAY|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_YEAR}|{TEST_MONTH}|{TEST_DAY2}" assert browse.domain == DOMAIN - assert browse.title == f"{TEST_NVR_NAME} High res." + assert browse.title == f"{TEST_NVR_NAME} lens 0 High res." 
assert browse.identifier == browse_days_id assert browse.children[0].identifier == browse_day_0_id assert browse.children[1].identifier == browse_day_1_id @@ -220,7 +220,8 @@ async def test_browsing( browse_file_id = f"FILE|{entry_id}|{TEST_CHANNEL}|{TEST_STREAM}|{TEST_FILE_NAME}" assert browse.domain == DOMAIN assert ( - browse.title == f"{TEST_NVR_NAME} High res. {TEST_YEAR}/{TEST_MONTH}/{TEST_DAY}" + browse.title + == f"{TEST_NVR_NAME} lens 0 High res. {TEST_YEAR}/{TEST_MONTH}/{TEST_DAY}" ) assert browse.identifier == browse_files_id assert browse.children[0].identifier == browse_file_id @@ -272,10 +273,10 @@ async def test_browsing_rec_playback_unsupported( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera which does not support playback of recordings.""" - reolink_connect.api_version.return_value = 0 + reolink_connect.supported.return_value = 0 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # browse root @@ -293,10 +294,10 @@ async def test_browsing_errors( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera errors.""" - reolink_connect.api_version.return_value = 1 + reolink_connect.supported.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() # browse root @@ -312,13 +313,13 @@ async def test_browsing_not_loaded( config_entry: MockConfigEntry, ) -> None: """Test browsing a Reolink camera integration which is not loaded.""" - reolink_connect.api_version.return_value = 1 + reolink_connect.supported.return_value = 1 with patch("homeassistant.components.reolink.PLATFORMS", [Platform.CAMERA]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - reolink_connect.get_host_data = AsyncMock(side_effect=ReolinkError("Test error")) + reolink_connect.get_host_data.side_effect = ReolinkError("Test error") config_entry2 = MockConfigEntry( domain=const.DOMAIN, unique_id=format_mac(TEST_MAC2), diff --git a/tests/components/reolink/test_number.py b/tests/components/reolink/test_number.py new file mode 100644 index 00000000000..e9abcec946c --- /dev/null +++ b/tests/components/reolink/test_number.py @@ -0,0 +1,111 @@ +"""Test the Reolink number platform.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from reolink_aio.api import Chime +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.number import ( + ATTR_VALUE, + DOMAIN as NUMBER_DOMAIN, + SERVICE_SET_VALUE, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_ENTITY_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +async def test_number( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test number entity with volume.""" + reolink_connect.volume.return_value = 80 + + with 
patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.NUMBER}.{TEST_NVR_NAME}_volume" + + assert hass.states.get(entity_id).state == "80" + + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + reolink_connect.set_volume.assert_called_with(0, volume=50) + + reolink_connect.set_volume.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + reolink_connect.set_volume.side_effect = InvalidParameterError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 50}, + blocking=True, + ) + + +async def test_chime_number( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, +) -> None: + """Test number entity of a chime with chime volume.""" + test_chime.volume = 3 + + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.NUMBER]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.NUMBER}.test_chime_volume" + + assert hass.states.get(entity_id).state == "3" + + test_chime.set_option = AsyncMock() + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 2}, + blocking=True, + ) + test_chime.set_option.assert_called_with(volume=2) + + test_chime.set_option.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 1}, + blocking=True, + ) + + test_chime.set_option.side_effect = InvalidParameterError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + NUMBER_DOMAIN, + SERVICE_SET_VALUE, + {ATTR_ENTITY_ID: entity_id, ATTR_VALUE: 1}, + blocking=True, + ) diff --git a/tests/components/reolink/test_select.py b/tests/components/reolink/test_select.py index 53c1e494b3d..0534f36f4c5 100644 --- a/tests/components/reolink/test_select.py +++ b/tests/components/reolink/test_select.py @@ -1,8 +1,8 @@ """Test the Reolink select platform.""" -from datetime import timedelta from unittest.mock import AsyncMock, MagicMock, patch +from freezegun.api import FrozenDateTimeFactory import pytest from reolink_aio.api import Chime from reolink_aio.exceptions import InvalidParameterError, ReolinkError @@ -19,7 +19,6 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er -from homeassistant.util.dt import utcnow from .conftest import TEST_NVR_NAME @@ -28,22 +27,20 @@ from tests.common import MockConfigEntry, async_fire_time_changed async def test_floodlight_mode_select( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, config_entry: MockConfigEntry, reolink_connect: MagicMock, entity_registry: er.EntityRegistry, ) -> None: """Test select entity with 
floodlight_mode.""" - reolink_connect.whiteled_mode.return_value = 1 - reolink_connect.whiteled_mode_list.return_value = ["off", "auto"] with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_floodlight_mode" - assert hass.states.is_state(entity_id, "auto") + assert hass.states.get(entity_id).state == "auto" - reolink_connect.set_whiteled = AsyncMock() await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, @@ -52,7 +49,7 @@ async def test_floodlight_mode_select( ) reolink_connect.set_whiteled.assert_called_once() - reolink_connect.set_whiteled = AsyncMock(side_effect=ReolinkError("Test error")) + reolink_connect.set_whiteled.side_effect = ReolinkError("Test error") with pytest.raises(HomeAssistantError): await hass.services.async_call( SELECT_DOMAIN, @@ -61,9 +58,7 @@ async def test_floodlight_mode_select( blocking=True, ) - reolink_connect.set_whiteled = AsyncMock( - side_effect=InvalidParameterError("Test error") - ) + reolink_connect.set_whiteled.side_effect = InvalidParameterError("Test error") with pytest.raises(ServiceValidationError): await hass.services.async_call( SELECT_DOMAIN, @@ -72,6 +67,13 @@ async def test_floodlight_mode_select( blocking=True, ) + reolink_connect.whiteled_mode.return_value = -99 # invalid value + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_UNKNOWN + async def test_play_quick_reply_message( hass: HomeAssistant, @@ -82,14 +84,13 @@ async def test_play_quick_reply_message( """Test select play_quick_reply_message entity.""" reolink_connect.quick_reply_dict.return_value = {0: "off", 1: "test message"} with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.SELECT}.{TEST_NVR_NAME}_play_quick_reply_message" - assert hass.states.is_state(entity_id, STATE_UNKNOWN) + assert hass.states.get(entity_id).state == STATE_UNKNOWN - reolink_connect.play_quick_reply = AsyncMock() await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, @@ -101,46 +102,33 @@ async def test_play_quick_reply_message( async def test_chime_select( hass: HomeAssistant, + freezer: FrozenDateTimeFactory, config_entry: MockConfigEntry, reolink_connect: MagicMock, + test_chime: Chime, entity_registry: er.EntityRegistry, ) -> None: """Test chime select entity.""" - TEST_CHIME = Chime( - host=reolink_connect, - dev_id=12345678, - channel=0, - ) - TEST_CHIME.name = "Test chime" - TEST_CHIME.volume = 3 - TEST_CHIME.led_state = True - TEST_CHIME.event_info = { - "md": {"switch": 0, "musicId": 0}, - "people": {"switch": 0, "musicId": 1}, - "visitor": {"switch": 1, "musicId": 2}, - } - - reolink_connect.chime_list = [TEST_CHIME] - with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): - assert await hass.config_entries.async_setup(config_entry.entry_id) is True + assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() assert 
config_entry.state is ConfigEntryState.LOADED entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" - assert hass.states.is_state(entity_id, "pianokey") + assert hass.states.get(entity_id).state == "pianokey" - TEST_CHIME.set_tone = AsyncMock() + # Test selecting chime ringtone option + test_chime.set_tone = AsyncMock() await hass.services.async_call( SELECT_DOMAIN, SERVICE_SELECT_OPTION, {ATTR_ENTITY_ID: entity_id, "option": "off"}, blocking=True, ) - TEST_CHIME.set_tone.assert_called_once() + test_chime.set_tone.assert_called_once() - TEST_CHIME.set_tone = AsyncMock(side_effect=ReolinkError("Test error")) + test_chime.set_tone.side_effect = ReolinkError("Test error") with pytest.raises(HomeAssistantError): await hass.services.async_call( SELECT_DOMAIN, @@ -149,7 +137,7 @@ async def test_chime_select( blocking=True, ) - TEST_CHIME.set_tone = AsyncMock(side_effect=InvalidParameterError("Test error")) + test_chime.set_tone.side_effect = InvalidParameterError("Test error") with pytest.raises(ServiceValidationError): await hass.services.async_call( SELECT_DOMAIN, @@ -158,10 +146,10 @@ async def test_chime_select( blocking=True, ) - TEST_CHIME.event_info = {} - async_fire_time_changed( - hass, utcnow() + DEVICE_UPDATE_INTERVAL + timedelta(seconds=30) - ) + # Test unavailable + test_chime.event_info = {} + freezer.tick(DEVICE_UPDATE_INTERVAL) + async_fire_time_changed(hass) await hass.async_block_till_done() - assert hass.states.is_state(entity_id, STATE_UNKNOWN) + assert hass.states.get(entity_id).state == STATE_UNKNOWN diff --git a/tests/components/reolink/test_services.py b/tests/components/reolink/test_services.py new file mode 100644 index 00000000000..a4b7d8f0da4 --- /dev/null +++ b/tests/components/reolink/test_services.py @@ -0,0 +1,116 @@ +"""Test the Reolink services.""" + +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from reolink_aio.api import Chime +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.reolink.const import DOMAIN as REOLINK_DOMAIN +from homeassistant.components.reolink.services import ATTR_RINGTONE +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ATTR_DEVICE_ID, Platform +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er + +from tests.common import MockConfigEntry + + +async def test_play_chime_service_entity( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, + entity_registry: er.EntityRegistry, +) -> None: + """Test chime play service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" + entity = entity_registry.async_get(entity_id) + assert entity is not None + device_id = entity.device_id + + # Test chime play service with device + test_chime.play = AsyncMock() + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + test_chime.play.assert_called_once() + + # Test errors + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: 
["invalid_id"], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + + test_chime.play = AsyncMock(side_effect=ReolinkError("Test error")) + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + + test_chime.play = AsyncMock(side_effect=InvalidParameterError("Test error")) + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + + reolink_connect.chime.return_value = None + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) + + +async def test_play_chime_service_unloaded( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + test_chime: Chime, + entity_registry: er.EntityRegistry, +) -> None: + """Test chime play service when config entry is unloaded.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SELECT]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SELECT}.test_chime_visitor_ringtone" + entity = entity_registry.async_get(entity_id) + assert entity is not None + device_id = entity.device_id + + # Unload the config entry + assert await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED + + # Test chime play service + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + REOLINK_DOMAIN, + "play_chime", + {ATTR_DEVICE_ID: [device_id], ATTR_RINGTONE: "attraction"}, + blocking=True, + ) diff --git a/tests/components/reolink/test_siren.py b/tests/components/reolink/test_siren.py new file mode 100644 index 00000000000..0d9d3e0b800 --- /dev/null +++ b/tests/components/reolink/test_siren.py @@ -0,0 +1,134 @@ +"""Test the Reolink siren platform.""" + +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from reolink_aio.exceptions import InvalidParameterError, ReolinkError + +from homeassistant.components.siren import ( + ATTR_DURATION, + ATTR_VOLUME_LEVEL, + DOMAIN as SIREN_DOMAIN, +) +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_TURN_OFF, + SERVICE_TURN_ON, + STATE_UNKNOWN, + Platform, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from .conftest import TEST_NVR_NAME + +from tests.common import MockConfigEntry + + +async def test_siren( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test siren entity.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SIREN]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" + assert hass.states.get(entity_id).state == STATE_UNKNOWN + + # test siren turn on + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) 
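+    # Turning on without optional attributes should not adjust the volume.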
+ reolink_connect.set_volume.assert_not_called() + reolink_connect.set_siren.assert_called_with(0, True, None) + + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_VOLUME_LEVEL: 0.85, ATTR_DURATION: 2}, + blocking=True, + ) + reolink_connect.set_volume.assert_called_with(0, volume=85) + reolink_connect.set_siren.assert_called_with(0, True, 2) + + # test siren turn off + reolink_connect.set_siren.side_effect = None + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + reolink_connect.set_siren.assert_called_with(0, False, None) + + +@pytest.mark.parametrize("attr", ["set_volume", "set_siren"]) +@pytest.mark.parametrize( + ("value", "expected"), + [ + ( + AsyncMock(side_effect=ReolinkError("Test error")), + HomeAssistantError, + ), + ( + AsyncMock(side_effect=InvalidParameterError("Test error")), + ServiceValidationError, + ), + ], +) +async def test_siren_turn_on_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, + attr: str, + value: Any, + expected: Any, +) -> None: + """Test errors when calling siren turn on service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SIREN]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" + + setattr(reolink_connect, attr, value) + with pytest.raises(expected): + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_ON, + {ATTR_ENTITY_ID: entity_id, ATTR_VOLUME_LEVEL: 0.85, ATTR_DURATION: 2}, + blocking=True, + ) + + +async def test_siren_turn_off_errors( + hass: HomeAssistant, + config_entry: MockConfigEntry, + reolink_connect: MagicMock, +) -> None: + """Test errors when calling siren turn off service.""" + with patch("homeassistant.components.reolink.PLATFORMS", [Platform.SIREN]): + assert await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + entity_id = f"{Platform.SIREN}.{TEST_NVR_NAME}_siren" + + reolink_connect.set_siren.side_effect = ReolinkError("Test error") + with pytest.raises(HomeAssistantError): + await hass.services.async_call( + SIREN_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) diff --git a/tests/components/repairs/test_websocket_api.py b/tests/components/repairs/test_websocket_api.py index dcc6932cf4a..bb3d50f9eb5 100644 --- a/tests/components/repairs/test_websocket_api.py +++ b/tests/components/repairs/test_websocket_api.py @@ -18,7 +18,11 @@ from homeassistant.helpers import issue_registry as ir from homeassistant.setup import async_setup_component from tests.common import MockUser, mock_platform -from tests.typing import ClientSessionGenerator, WebSocketGenerator +from tests.typing import ( + ClientSessionGenerator, + MockHAClientWebSocket, + WebSocketGenerator, +) DEFAULT_ISSUES = [ { @@ -34,7 +38,11 @@ DEFAULT_ISSUES = [ ] -async def create_issues(hass, ws_client, issues=None): +async def create_issues( + hass: HomeAssistant, + ws_client: MockHAClientWebSocket, + issues: list[dict[str, Any]] | None = None, +) -> list[dict[str, Any]]: """Create issues.""" def api_issue(issue): @@ -119,7 +127,11 @@ async def mock_repairs_integration(hass: HomeAssistant) -> None: """Mock a repairs integration.""" 
hass.config.components.add("fake_integration") - def async_create_fix_flow(hass, issue_id, data): + def async_create_fix_flow( + hass: HomeAssistant, + issue_id: str, + data: dict[str, str | int | float | None] | None, + ) -> RepairsFlow: assert issue_id in EXPECTED_DATA assert data == EXPECTED_DATA[issue_id] diff --git a/tests/components/rfxtrx/conftest.py b/tests/components/rfxtrx/conftest.py index 88450638d6c..be5c72e6483 100644 --- a/tests/components/rfxtrx/conftest.py +++ b/tests/components/rfxtrx/conftest.py @@ -2,7 +2,9 @@ from __future__ import annotations -from unittest.mock import Mock, patch +from collections.abc import Callable, Coroutine, Generator +from typing import Any +from unittest.mock import MagicMock, Mock, patch from freezegun import freeze_time import pytest @@ -67,7 +69,7 @@ async def setup_rfx_test_cfg( @pytest.fixture(autouse=True) -async def transport_mock(hass): +def transport_mock() -> Generator[Mock]: """Fixture that make sure all transports are fake.""" transport = Mock(spec=RFXtrxTransport) with ( @@ -78,14 +80,14 @@ async def transport_mock(hass): @pytest.fixture(autouse=True) -async def connect_mock(hass): +def connect_mock() -> Generator[MagicMock]: """Fixture that make sure connect class is mocked.""" with patch("RFXtrx.Connect") as connect: yield connect @pytest.fixture(autouse=True, name="rfxtrx") -def rfxtrx_fixture(hass, connect_mock): +def rfxtrx_fixture(hass: HomeAssistant, connect_mock: MagicMock) -> Mock: """Fixture that cleans up threads from integration.""" rfx = Mock(spec=Connect) @@ -114,19 +116,21 @@ def rfxtrx_fixture(hass, connect_mock): @pytest.fixture(name="rfxtrx_automatic") -async def rfxtrx_automatic_fixture(hass, rfxtrx): +async def rfxtrx_automatic_fixture(hass: HomeAssistant, rfxtrx: Mock) -> Mock: """Fixture that starts up with automatic additions.""" await setup_rfx_test_cfg(hass, automatic_add=True, devices={}) return rfxtrx @pytest.fixture -async def timestep(hass): +def timestep( + hass: HomeAssistant, +) -> Generator[Callable[[int], Coroutine[Any, Any, None]]]: """Step system time forward.""" with freeze_time(utcnow()) as frozen_time: - async def delay(seconds): + async def delay(seconds: int) -> None: """Trigger delay in system.""" frozen_time.tick(delta=seconds) async_fire_time_changed(hass) diff --git a/tests/components/rfxtrx/test_config_flow.py b/tests/components/rfxtrx/test_config_flow.py index b61440c31b6..1e23bdaf982 100644 --- a/tests/components/rfxtrx/test_config_flow.py +++ b/tests/components/rfxtrx/test_config_flow.py @@ -29,7 +29,9 @@ def com_port(): return port -async def start_options_flow(hass, entry): +async def start_options_flow( + hass: HomeAssistant, entry: MockConfigEntry +) -> config_entries.ConfigFlowResult: """Start the options flow with the entry under test.""" entry.add_to_hass(hass) diff --git a/tests/components/rfxtrx/test_device_action.py b/tests/components/rfxtrx/test_device_action.py index c678f2dfc62..a3522934c57 100644 --- a/tests/components/rfxtrx/test_device_action.py +++ b/tests/components/rfxtrx/test_device_action.py @@ -47,7 +47,7 @@ async def test_device_test_data(rfxtrx, device: DeviceTestData) -> None: } -async def setup_entry(hass, devices): +async def setup_entry(hass: HomeAssistant, devices: dict[str, Any]) -> None: """Construct a config setup.""" entry_data = create_rfx_test_cfg(devices=devices) mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data) @@ -79,7 +79,10 @@ def _get_expected_actions(data): ], ) async def test_get_actions( - hass: HomeAssistant, 
device_registry: dr.DeviceRegistry, device, expected + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + device: DeviceTestData, + expected, ) -> None: """Test we get the expected actions from a rfxtrx.""" await setup_entry(hass, {device.code: {}}) @@ -136,7 +139,7 @@ async def test_action( hass: HomeAssistant, device_registry: dr.DeviceRegistry, rfxtrx: RFXtrx.Connect, - device, + device: DeviceTestData, config, expected, ) -> None: diff --git a/tests/components/rfxtrx/test_device_trigger.py b/tests/components/rfxtrx/test_device_trigger.py index 38f7cccc072..9c56951761b 100644 --- a/tests/components/rfxtrx/test_device_trigger.py +++ b/tests/components/rfxtrx/test_device_trigger.py @@ -46,7 +46,7 @@ EVENT_FIREALARM_1 = EventTestData( ) -async def setup_entry(hass, devices): +async def setup_entry(hass: HomeAssistant, devices: dict[str, Any]) -> None: """Construct a config setup.""" entry_data = create_rfx_test_cfg(devices=devices) mock_entry = MockConfigEntry(domain="rfxtrx", unique_id=DOMAIN, data=entry_data) diff --git a/tests/components/ridwell/conftest.py b/tests/components/ridwell/conftest.py index 32907ac8037..6ea9d91f8e9 100644 --- a/tests/components/ridwell/conftest.py +++ b/tests/components/ridwell/conftest.py @@ -1,6 +1,8 @@ """Define test fixtures for Ridwell.""" +from collections.abc import Generator from datetime import date +from typing import Any from unittest.mock import AsyncMock, Mock, patch from aioridwell.model import EventState, RidwellPickup, RidwellPickupEvent @@ -8,6 +10,7 @@ import pytest from homeassistant.components.ridwell.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -19,7 +22,7 @@ TEST_USER_ID = "12345" @pytest.fixture(name="account") -def account_fixture(): +def account_fixture() -> Mock: """Define a Ridwell account.""" return Mock( account_id=TEST_ACCOUNT_ID, @@ -44,7 +47,7 @@ def account_fixture(): @pytest.fixture(name="client") -def client_fixture(account): +def client_fixture(account: Mock) -> Mock: """Define an aioridwell client.""" return Mock( async_authenticate=AsyncMock(), @@ -55,7 +58,9 @@ def client_fixture(account): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -68,7 +73,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(hass): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_USERNAME: TEST_USERNAME, @@ -77,7 +82,7 @@ def config_fixture(hass): @pytest.fixture(name="mock_aioridwell") -async def mock_aioridwell_fixture(hass, client, config): +def mock_aioridwell_fixture(client: Mock, config: dict[str, Any]) -> Generator[None]: """Define a fixture to patch aioridwell.""" with ( patch( @@ -93,7 +98,9 @@ async def mock_aioridwell_fixture(hass, client, config): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_aioridwell): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_aioridwell: None +) -> None: """Define a fixture to set up ridwell.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/ridwell/test_config_flow.py 
b/tests/components/ridwell/test_config_flow.py index 601ac182670..6dd00344c5b 100644 --- a/tests/components/ridwell/test_config_flow.py +++ b/tests/components/ridwell/test_config_flow.py @@ -13,6 +13,8 @@ from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_USERNAME +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("get_client_response", "errors"), @@ -65,12 +67,10 @@ async def test_duplicate_error(hass: HomeAssistant, config, setup_config_entry) async def test_step_reauth( - hass: HomeAssistant, config, config_entry, setup_config_entry + hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test a full reauth flow.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=config - ) + result = await config_entry.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_PASSWORD: "new_password"}, diff --git a/tests/components/ring/common.py b/tests/components/ring/common.py index b129623aa95..3b78adf0e09 100644 --- a/tests/components/ring/common.py +++ b/tests/components/ring/common.py @@ -3,12 +3,14 @@ from unittest.mock import patch from homeassistant.components.ring import DOMAIN +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry -async def setup_platform(hass, platform): +async def setup_platform(hass: HomeAssistant, platform: Platform) -> None: """Set up the ring platform and prerequisites.""" MockConfigEntry(domain=DOMAIN, data={"username": "foo", "token": {}}).add_to_hass( hass diff --git a/tests/components/ring/conftest.py b/tests/components/ring/conftest.py index cd4447c1a9a..4456a9daa26 100644 --- a/tests/components/ring/conftest.py +++ b/tests/components/ring/conftest.py @@ -26,13 +26,23 @@ def mock_setup_entry() -> Generator[AsyncMock]: yield mock_setup_entry +@pytest.fixture +def mock_ring_init_auth_class(): + """Mock ring_doorbell.Auth in init and return the mock class.""" + with patch("homeassistant.components.ring.Auth", autospec=True) as mock_ring_auth: + mock_ring_auth.return_value.async_fetch_token.return_value = { + "access_token": "mock-token" + } + yield mock_ring_auth + + @pytest.fixture def mock_ring_auth(): """Mock ring_doorbell.Auth.""" with patch( "homeassistant.components.ring.config_flow.Auth", autospec=True ) as mock_ring_auth: - mock_ring_auth.return_value.fetch_token.return_value = { + mock_ring_auth.return_value.async_fetch_token.return_value = { "access_token": "mock-token" } yield mock_ring_auth.return_value diff --git a/tests/components/ring/device_mocks.py b/tests/components/ring/device_mocks.py index 88ad37bdd36..d2671c3896d 100644 --- a/tests/components/ring/device_mocks.py +++ b/tests/components/ring/device_mocks.py @@ -10,7 +10,7 @@ Mocks the api calls on the devices such as history() and health(). 
from copy import deepcopy from datetime import datetime from time import time -from unittest.mock import MagicMock +from unittest.mock import AsyncMock, MagicMock from ring_doorbell import ( RingCapability, @@ -132,18 +132,18 @@ def _mocked_ring_device(device_dict, device_family, device_class, capabilities): # Configure common methods mock_device.has_capability.side_effect = has_capability - mock_device.update_health_data.side_effect = lambda: update_health_data( + mock_device.async_update_health_data.side_effect = lambda: update_health_data( DOORBOT_HEALTH if device_family != "chimes" else CHIME_HEALTH ) # Configure methods based on capability if has_capability(RingCapability.HISTORY): mock_device.configure_mock(last_history=[]) - mock_device.history.side_effect = lambda *_, **__: update_history_data( + mock_device.async_history.side_effect = lambda *_, **__: update_history_data( DOORBOT_HISTORY if device_family != "other" else INTERCOM_HISTORY ) if has_capability(RingCapability.VIDEO): - mock_device.recording_url = MagicMock(return_value="http://dummy.url") + mock_device.async_recording_url = AsyncMock(return_value="http://dummy.url") if has_capability(RingCapability.MOTION_DETECTION): mock_device.configure_mock( diff --git a/tests/components/ring/test_button.py b/tests/components/ring/test_button.py index 6fef3295159..946a893c8ad 100644 --- a/tests/components/ring/test_button.py +++ b/tests/components/ring/test_button.py @@ -28,11 +28,11 @@ async def test_button_opens_door( await setup_platform(hass, Platform.BUTTON) mock_intercom = mock_ring_devices.get_device(185036587) - mock_intercom.open_door.assert_not_called() + mock_intercom.async_open_door.assert_not_called() await hass.services.async_call( "button", "press", {"entity_id": "button.ingress_open_door"}, blocking=True ) await hass.async_block_till_done(wait_background_tasks=True) - mock_intercom.open_door.assert_called_once() + mock_intercom.async_open_door.assert_called_once() diff --git a/tests/components/ring/test_camera.py b/tests/components/ring/test_camera.py index 49b7dc10f05..619fb52846c 100644 --- a/tests/components/ring/test_camera.py +++ b/tests/components/ring/test_camera.py @@ -1,6 +1,6 @@ """The tests for the Ring switch platform.""" -from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch +from unittest.mock import AsyncMock, patch from aiohttp.test_utils import make_mocked_request from freezegun.api import FrozenDateTimeFactory @@ -180,8 +180,7 @@ async def test_motion_detection_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_camera_mock = mock_ring_devices.get_device(765432) - p = PropertyMock(side_effect=exception_type) - type(front_camera_mock).motion_detection = p + front_camera_mock.async_set_motion_detection.side_effect = exception_type with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -191,7 +190,7 @@ async def test_motion_detection_errors_when_turned_on( blocking=True, ) await hass.async_block_till_done() - p.assert_called_once() + front_camera_mock.async_set_motion_detection.assert_called_once() assert ( any( flow @@ -212,7 +211,7 @@ async def test_camera_handle_mjpeg_stream( await setup_platform(hass, Platform.CAMERA) front_camera_mock = mock_ring_devices.get_device(765432) - front_camera_mock.recording_url.return_value = None + front_camera_mock.async_recording_url.return_value = None state = hass.states.get("camera.front") assert state is not None @@ -220,8 +219,8 @@ async def test_camera_handle_mjpeg_stream( 
mock_request = make_mocked_request("GET", "/", headers={"token": "x"}) # history not updated yet - front_camera_mock.history.assert_not_called() - front_camera_mock.recording_url.assert_not_called() + front_camera_mock.async_history.assert_not_called() + front_camera_mock.async_recording_url.assert_not_called() stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") assert stream is None @@ -229,30 +228,30 @@ async def test_camera_handle_mjpeg_stream( freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.history.assert_called_once() - front_camera_mock.recording_url.assert_called_once() + front_camera_mock.async_history.assert_called_once() + front_camera_mock.async_recording_url.assert_called_once() stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") assert stream is None # Stop the history updating so we can update the values manually - front_camera_mock.history = MagicMock() + front_camera_mock.async_history = AsyncMock() front_camera_mock.last_history[0]["recording"]["status"] = "not ready" freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.recording_url.assert_called_once() + front_camera_mock.async_recording_url.assert_called_once() stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") assert stream is None # If the history id hasn't changed the camera will not check again for the video url # until the FORCE_REFRESH_INTERVAL has passed front_camera_mock.last_history[0]["recording"]["status"] = "ready" - front_camera_mock.recording_url = MagicMock(return_value="http://dummy.url") + front_camera_mock.async_recording_url = AsyncMock(return_value="http://dummy.url") freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.recording_url.assert_not_called() + front_camera_mock.async_recording_url.assert_not_called() stream = await camera.async_get_mjpeg_stream(hass, mock_request, "camera.front") assert stream is None @@ -260,7 +259,7 @@ async def test_camera_handle_mjpeg_stream( freezer.tick(FORCE_REFRESH_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_camera_mock.recording_url.assert_called_once() + front_camera_mock.async_recording_url.assert_called_once() # Now the stream should be returned stream_reader = MockStreamReader(SMALLEST_VALID_JPEG_BYTES) @@ -290,8 +289,8 @@ async def test_camera_image( assert state is not None # history not updated yet - front_camera_mock.history.assert_not_called() - front_camera_mock.recording_url.assert_not_called() + front_camera_mock.async_history.assert_not_called() + front_camera_mock.async_recording_url.assert_not_called() with ( patch( "homeassistant.components.ring.camera.ffmpeg.async_get_image", @@ -305,8 +304,8 @@ async def test_camera_image( async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) # history updated so image available - front_camera_mock.history.assert_called_once() - front_camera_mock.recording_url.assert_called_once() + front_camera_mock.async_history.assert_called_once() + front_camera_mock.async_recording_url.assert_called_once() with patch( "homeassistant.components.ring.camera.ffmpeg.async_get_image", diff --git a/tests/components/ring/test_config_flow.py b/tests/components/ring/test_config_flow.py 
index 2420bb9cc50..bbaec2e37c4 100644 --- a/tests/components/ring/test_config_flow.py +++ b/tests/components/ring/test_config_flow.py @@ -57,7 +57,7 @@ async def test_form_error( result = await hass.config_entries.flow.async_init( DOMAIN, context={"source": config_entries.SOURCE_USER} ) - mock_ring_auth.fetch_token.side_effect = error_type + mock_ring_auth.async_fetch_token.side_effect = error_type result2 = await hass.config_entries.flow.async_configure( result["flow_id"], {"username": "hello@home-assistant.io", "password": "test-password"}, @@ -79,7 +79,7 @@ async def test_form_2fa( assert result["type"] is FlowResultType.FORM assert result["errors"] == {} - mock_ring_auth.fetch_token.side_effect = ring_doorbell.Requires2FAError + mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError result2 = await hass.config_entries.flow.async_configure( result["flow_id"], { @@ -88,20 +88,20 @@ async def test_form_2fa( }, ) await hass.async_block_till_done() - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "fake-password", None ) assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "2fa" - mock_ring_auth.fetch_token.reset_mock(side_effect=True) - mock_ring_auth.fetch_token.return_value = "new-foobar" + mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) + mock_ring_auth.async_fetch_token.return_value = "new-foobar" result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={"2fa": "123456"}, ) - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "fake-password", "123456" ) assert result3["type"] is FlowResultType.CREATE_ENTRY @@ -128,7 +128,7 @@ async def test_reauth( [result] = flows assert result["step_id"] == "reauth_confirm" - mock_ring_auth.fetch_token.side_effect = ring_doorbell.Requires2FAError + mock_ring_auth.async_fetch_token.side_effect = ring_doorbell.Requires2FAError result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -136,19 +136,19 @@ async def test_reauth( }, ) - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "other_fake_password", None ) assert result2["type"] is FlowResultType.FORM assert result2["step_id"] == "2fa" - mock_ring_auth.fetch_token.reset_mock(side_effect=True) - mock_ring_auth.fetch_token.return_value = "new-foobar" + mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) + mock_ring_auth.async_fetch_token.return_value = "new-foobar" result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={"2fa": "123456"}, ) - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "other_fake_password", "123456" ) assert result3["type"] is FlowResultType.ABORT @@ -185,7 +185,7 @@ async def test_reauth_error( [result] = flows assert result["step_id"] == "reauth_confirm" - mock_ring_auth.fetch_token.side_effect = error_type + mock_ring_auth.async_fetch_token.side_effect = error_type result2 = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -194,15 +194,15 @@ async def test_reauth_error( ) await hass.async_block_till_done() - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "error_fake_password", None ) assert 
result2["type"] is FlowResultType.FORM assert result2["errors"] == {"base": errors_msg} # Now test reauth can go on to succeed - mock_ring_auth.fetch_token.reset_mock(side_effect=True) - mock_ring_auth.fetch_token.return_value = "new-foobar" + mock_ring_auth.async_fetch_token.reset_mock(side_effect=True) + mock_ring_auth.async_fetch_token.return_value = "new-foobar" result3 = await hass.config_entries.flow.async_configure( result2["flow_id"], user_input={ @@ -210,7 +210,7 @@ async def test_reauth_error( }, ) - mock_ring_auth.fetch_token.assert_called_once_with( + mock_ring_auth.async_fetch_token.assert_called_once_with( "foo@bar.com", "other_fake_password", None ) assert result3["type"] is FlowResultType.ABORT diff --git a/tests/components/ring/test_init.py b/tests/components/ring/test_init.py index d8529e874b9..4ab3e1bd366 100644 --- a/tests/components/ring/test_init.py +++ b/tests/components/ring/test_init.py @@ -10,7 +10,7 @@ from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN from homeassistant.components.ring import DOMAIN from homeassistant.components.ring.const import SCAN_INTERVAL from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState -from homeassistant.const import CONF_USERNAME +from homeassistant.const import CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er, issue_registry as ir from homeassistant.setup import async_setup_component @@ -42,11 +42,11 @@ async def test_setup_entry_device_update( """Test devices are updating after setup entry.""" front_door_doorbell = mock_ring_devices.get_device(987654) - front_door_doorbell.history.assert_not_called() + front_door_doorbell.async_history.assert_not_called() freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) await hass.async_block_till_done(wait_background_tasks=True) - front_door_doorbell.history.assert_called_once() + front_door_doorbell.async_history.assert_called_once() async def test_auth_failed_on_setup( @@ -56,7 +56,7 @@ async def test_auth_failed_on_setup( ) -> None: """Test auth failure on setup entry.""" mock_config_entry.add_to_hass(hass) - mock_ring_client.update_data.side_effect = AuthenticationError + mock_ring_client.async_update_data.side_effect = AuthenticationError assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) await hass.config_entries.async_setup(mock_config_entry.entry_id) @@ -90,7 +90,7 @@ async def test_error_on_setup( """Test non-auth errors on setup entry.""" mock_config_entry.add_to_hass(hass) - mock_ring_client.update_data.side_effect = error_type + mock_ring_client.async_update_data.side_effect = error_type await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() @@ -113,7 +113,7 @@ async def test_auth_failure_on_global_update( await hass.async_block_till_done() assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) - mock_ring_client.update_devices.side_effect = AuthenticationError + mock_ring_client.async_update_devices.side_effect = AuthenticationError freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -139,7 +139,7 @@ async def test_auth_failure_on_device_update( assert not any(mock_config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_door_doorbell = mock_ring_devices.get_device(987654) - front_door_doorbell.history.side_effect = AuthenticationError + front_door_doorbell.async_history.side_effect = AuthenticationError freezer.tick(SCAN_INTERVAL) 
async_fire_time_changed(hass) @@ -178,7 +178,7 @@ async def test_error_on_global_update( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - mock_ring_client.update_devices.side_effect = error_type + mock_ring_client.async_update_devices.side_effect = error_type freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -219,7 +219,7 @@ async def test_error_on_device_update( await hass.async_block_till_done() front_door_doorbell = mock_ring_devices.get_device(765432) - front_door_doorbell.history.side_effect = error_type + front_door_doorbell.async_history.side_effect = error_type freezer.tick(SCAN_INTERVAL) async_fire_time_changed(hass) @@ -386,3 +386,30 @@ async def test_update_unique_id_no_update( assert entity_migrated assert entity_migrated.unique_id == correct_unique_id assert "Fixing non string unique id" not in caplog.text + + +async def test_token_updated( + hass: HomeAssistant, + freezer: FrozenDateTimeFactory, + mock_config_entry: MockConfigEntry, + mock_ring_client, + mock_ring_init_auth_class, +) -> None: + """Test that the token value is updated in the config entry. + + This simulates the api calling the callback. + """ + mock_config_entry.add_to_hass(hass) + assert await hass.config_entries.async_setup(mock_config_entry.entry_id) + + assert mock_ring_init_auth_class.call_count == 1 + token_updater = mock_ring_init_auth_class.call_args.args[2] + assert mock_config_entry.data[CONF_TOKEN] == {"access_token": "mock-token"} + + mock_ring_client.async_update_devices.side_effect = lambda: token_updater( + {"access_token": "new-mock-token"} + ) + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert mock_config_entry.data[CONF_TOKEN] == {"access_token": "new-mock-token"} diff --git a/tests/components/ring/test_light.py b/tests/components/ring/test_light.py index c2d21a22951..22ed4a31cf8 100644 --- a/tests/components/ring/test_light.py +++ b/tests/components/ring/test_light.py @@ -1,7 +1,5 @@ """The tests for the Ring light platform.""" -from unittest.mock import PropertyMock - import pytest import ring_doorbell @@ -109,15 +107,14 @@ async def test_light_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_light_mock = mock_ring_devices.get_device(765432) - p = PropertyMock(side_effect=exception_type) - type(front_light_mock).lights = p + front_light_mock.async_set_lights.side_effect = exception_type with pytest.raises(HomeAssistantError): await hass.services.async_call( "light", "turn_on", {"entity_id": "light.front_light"}, blocking=True ) await hass.async_block_till_done() - p.assert_called_once() + front_light_mock.async_set_lights.assert_called_once() assert ( any( diff --git a/tests/components/ring/test_siren.py b/tests/components/ring/test_siren.py index 695b54c3971..e71dd1e6e77 100644 --- a/tests/components/ring/test_siren.py +++ b/tests/components/ring/test_siren.py @@ -49,7 +49,7 @@ async def test_default_ding_chime_can_be_played( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.test_sound.assert_called_once_with(kind="ding") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="ding") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -71,7 +71,7 @@ async def test_turn_on_plays_default_chime( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - 
downstairs_chime_mock.test_sound.assert_called_once_with(kind="ding") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="ding") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -95,7 +95,7 @@ async def test_explicit_ding_chime_can_be_played( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.test_sound.assert_called_once_with(kind="ding") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="ding") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -117,7 +117,7 @@ async def test_motion_chime_can_be_played( await hass.async_block_till_done() downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.test_sound.assert_called_once_with(kind="motion") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="motion") state = hass.states.get("siren.downstairs_siren") assert state.state == "unknown" @@ -146,7 +146,7 @@ async def test_siren_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) downstairs_chime_mock = mock_ring_devices.get_device(123456) - downstairs_chime_mock.test_sound.side_effect = exception_type + downstairs_chime_mock.async_test_sound.side_effect = exception_type with pytest.raises(HomeAssistantError): await hass.services.async_call( @@ -155,7 +155,8 @@ async def test_siren_errors_when_turned_on( {"entity_id": "siren.downstairs_siren", "tone": "motion"}, blocking=True, ) - downstairs_chime_mock.test_sound.assert_called_once_with(kind="motion") + downstairs_chime_mock.async_test_sound.assert_called_once_with(kind="motion") + await hass.async_block_till_done() assert ( any( flow diff --git a/tests/components/ring/test_switch.py b/tests/components/ring/test_switch.py index 405f20420b7..f7aa885342a 100644 --- a/tests/components/ring/test_switch.py +++ b/tests/components/ring/test_switch.py @@ -1,7 +1,5 @@ """The tests for the Ring switch platform.""" -from unittest.mock import PropertyMock - import pytest import ring_doorbell @@ -116,15 +114,14 @@ async def test_switch_errors_when_turned_on( assert not any(config_entry.async_get_active_flows(hass, {SOURCE_REAUTH})) front_siren_mock = mock_ring_devices.get_device(765432) - p = PropertyMock(side_effect=exception_type) - type(front_siren_mock).siren = p + front_siren_mock.async_set_siren.side_effect = exception_type with pytest.raises(HomeAssistantError): await hass.services.async_call( "switch", "turn_on", {"entity_id": "switch.front_siren"}, blocking=True ) await hass.async_block_till_done() - p.assert_called_once() + front_siren_mock.async_set_siren.assert_called_once() assert ( any( flow diff --git a/tests/components/risco/conftest.py b/tests/components/risco/conftest.py index ab3b64b245d..3961d85d694 100644 --- a/tests/components/risco/conftest.py +++ b/tests/components/risco/conftest.py @@ -1,7 +1,10 @@ """Fixtures for Risco tests.""" +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import MagicMock, PropertyMock, patch +from pyrisco.cloud.event import Event import pytest from homeassistant.components.risco.const import DOMAIN, TYPE_LOCAL @@ -13,6 +16,7 @@ from homeassistant.const import ( CONF_TYPE, CONF_USERNAME, ) +from homeassistant.core import HomeAssistant from .util import TEST_SITE_NAME, TEST_SITE_UUID, system_mock, zone_mock @@ -116,19 +120,19 @@ def two_zone_local(): @pytest.fixture -def options(): +def options() -> 
dict[str, Any]: """Fixture for default (empty) options.""" return {} @pytest.fixture -def events(): +def events() -> list[Event]: """Fixture for default (empty) events.""" return [] @pytest.fixture -def cloud_config_entry(hass, options): +def cloud_config_entry(hass: HomeAssistant, options: dict[str, Any]) -> MockConfigEntry: """Fixture for a cloud config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, @@ -151,7 +155,9 @@ def login_with_error(exception): @pytest.fixture -async def setup_risco_cloud(hass, cloud_config_entry, events): +async def setup_risco_cloud( + hass: HomeAssistant, cloud_config_entry: MockConfigEntry, events: list[Event] +) -> AsyncGenerator[MockConfigEntry]: """Set up a Risco integration for testing.""" with ( patch( @@ -181,7 +187,7 @@ async def setup_risco_cloud(hass, cloud_config_entry, events): @pytest.fixture -def local_config_entry(hass, options): +def local_config_entry(hass: HomeAssistant, options: dict[str, Any]) -> MockConfigEntry: """Fixture for a local config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, data=TEST_LOCAL_CONFIG, options=options @@ -201,7 +207,9 @@ def connect_with_error(exception): @pytest.fixture -async def setup_risco_local(hass, local_config_entry): +async def setup_risco_local( + hass: HomeAssistant, local_config_entry: MockConfigEntry +) -> AsyncGenerator[MockConfigEntry]: """Set up a local Risco integration for testing.""" with ( patch( diff --git a/tests/components/risco/test_alarm_control_panel.py b/tests/components/risco/test_alarm_control_panel.py index 53d5b9573b6..9b554ddbf28 100644 --- a/tests/components/risco/test_alarm_control_panel.py +++ b/tests/components/risco/test_alarm_control_panel.py @@ -1,5 +1,7 @@ """Tests for the Risco alarm control panel device.""" +from collections.abc import Callable +from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch import pytest @@ -180,8 +182,13 @@ async def test_cloud_setup( async def _check_cloud_state( - hass, partitions, property, state, entity_id, partition_id -): + hass: HomeAssistant, + partitions: dict[int, Any], + property: str, + state: str, + entity_id: str, + partition_id: int, +) -> None: with patch.object(partitions[partition_id], property, return_value=True): await async_update_entity(hass, entity_id) await hass.async_block_till_done() @@ -256,7 +263,9 @@ async def test_cloud_states( ) -async def _call_alarm_service(hass, service, entity_id, **kwargs): +async def _call_alarm_service( + hass: HomeAssistant, service: str, entity_id: str, **kwargs: Any +) -> None: data = {"entity_id": entity_id, **kwargs} await hass.services.async_call( @@ -265,16 +274,27 @@ async def _call_alarm_service(hass, service, entity_id, **kwargs): async def _test_cloud_service_call( - hass, service, method, entity_id, partition_id, *args, **kwargs -): + hass: HomeAssistant, + service: str, + method: str, + entity_id: str, + partition_id: int, + *args: Any, + **kwargs: Any, +) -> None: with patch(f"homeassistant.components.risco.RiscoCloud.{method}") as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_awaited_once_with(partition_id, *args) async def _test_cloud_no_service_call( - hass, service, method, entity_id, partition_id, **kwargs -): + hass: HomeAssistant, + service: str, + method: str, + entity_id: str, + partition_id: int, + **kwargs: Any, +) -> None: with patch(f"homeassistant.components.risco.RiscoCloud.{method}") as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) 
set_mock.assert_not_awaited() @@ -531,8 +551,14 @@ async def test_local_setup( async def _check_local_state( - hass, partitions, property, state, entity_id, partition_id, callback -): + hass: HomeAssistant, + partitions: dict[int, Any], + property: str, + state: str, + entity_id: str, + partition_id: int, + callback: Callable, +) -> None: with patch.object(partitions[partition_id], property, return_value=True): await callback(partition_id, partitions[partition_id]) @@ -629,16 +655,27 @@ async def test_local_states( async def _test_local_service_call( - hass, service, method, entity_id, partition, *args, **kwargs -): + hass: HomeAssistant, + service: str, + method: str, + entity_id: str, + partition: int, + *args: Any, + **kwargs: Any, +) -> None: with patch.object(partition, method, AsyncMock()) as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_awaited_once_with(*args) async def _test_local_no_service_call( - hass, service, method, entity_id, partition, **kwargs -): + hass: HomeAssistant, + service: str, + method: str, + entity_id: str, + partition: int, + **kwargs: Any, +) -> None: with patch.object(partition, method, AsyncMock()) as set_mock: await _call_alarm_service(hass, service, entity_id, **kwargs) set_mock.assert_not_awaited() diff --git a/tests/components/risco/test_binary_sensor.py b/tests/components/risco/test_binary_sensor.py index b6ff29a0bce..600cfa02c0e 100644 --- a/tests/components/risco/test_binary_sensor.py +++ b/tests/components/risco/test_binary_sensor.py @@ -1,6 +1,8 @@ """Tests for the Risco binary sensors.""" -from unittest.mock import PropertyMock, patch +from collections.abc import Callable +from typing import Any +from unittest.mock import MagicMock, PropertyMock, patch import pytest @@ -59,7 +61,13 @@ async def test_cloud_setup( assert device.manufacturer == "Risco" -async def _check_cloud_state(hass, zones, triggered, entity_id, zone_id): +async def _check_cloud_state( + hass: HomeAssistant, + zones: dict[int, Any], + triggered: bool, + entity_id: str, + zone_id: int, +) -> None: with patch.object( zones[zone_id], "triggered", @@ -130,8 +138,14 @@ async def test_local_setup( async def _check_local_state( - hass, zones, entity_property, value, entity_id, zone_id, callback -): + hass: HomeAssistant, + zones: dict[int, Any], + entity_property: str, + value: bool, + entity_id: str, + zone_id: int, + callback: Callable, +) -> None: with patch.object( zones[zone_id], entity_property, @@ -218,7 +232,13 @@ async def test_armed_local_states( ) -async def _check_system_state(hass, system, entity_property, value, callback): +async def _check_system_state( + hass: HomeAssistant, + system: MagicMock, + entity_property: str, + value: bool, + callback: Callable, +) -> None: with patch.object( system, entity_property, diff --git a/tests/components/risco/test_config_flow.py b/tests/components/risco/test_config_flow.py index 9fade18ea96..cff5f80e6c4 100644 --- a/tests/components/risco/test_config_flow.py +++ b/tests/components/risco/test_config_flow.py @@ -154,14 +154,12 @@ async def test_form_cloud_already_exists(hass: HomeAssistant) -> None: assert result3["reason"] == "already_configured" -async def test_form_reauth(hass: HomeAssistant, cloud_config_entry) -> None: +async def test_form_reauth( + hass: HomeAssistant, cloud_config_entry: MockConfigEntry +) -> None: """Test reauthenticate.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=cloud_config_entry.data, - 
) + result = await cloud_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -194,15 +192,11 @@ async def test_form_reauth(hass: HomeAssistant, cloud_config_entry) -> None: async def test_form_reauth_with_new_username( - hass: HomeAssistant, cloud_config_entry + hass: HomeAssistant, cloud_config_entry: MockConfigEntry ) -> None: """Test reauthenticate with new username.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=cloud_config_entry.data, - ) + result = await cloud_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/risco/test_sensor.py b/tests/components/risco/test_sensor.py index 4c8f7bb4180..6a3ac6f42e3 100644 --- a/tests/components/risco/test_sensor.py +++ b/tests/components/risco/test_sensor.py @@ -136,7 +136,7 @@ async def test_error_on_login( assert not entity_registry.async_is_registered(entity_id) -def _check_state(hass, category, entity_id): +def _check_state(hass: HomeAssistant, category: str, entity_id: str) -> None: event_index = CATEGORIES_TO_EVENTS[category] event = TEST_EVENTS[event_index] state = hass.states.get(entity_id) @@ -160,7 +160,7 @@ def _check_state(hass, category, entity_id): @pytest.fixture -async def _set_utc_time_zone(hass): +async def _set_utc_time_zone(hass: HomeAssistant) -> None: await hass.config.async_set_time_zone("UTC") diff --git a/tests/components/risco/test_switch.py b/tests/components/risco/test_switch.py index acf80462d54..54e7bc3ca0c 100644 --- a/tests/components/risco/test_switch.py +++ b/tests/components/risco/test_switch.py @@ -1,5 +1,7 @@ """Tests for the Risco binary sensors.""" +from collections.abc import Callable +from typing import Any from unittest.mock import PropertyMock, patch import pytest @@ -40,7 +42,13 @@ async def test_cloud_setup( assert entity_registry.async_is_registered(SECOND_ENTITY_ID) -async def _check_cloud_state(hass, zones, bypassed, entity_id, zone_id): +async def _check_cloud_state( + hass: HomeAssistant, + zones: dict[int, Any], + bypassed: bool, + entity_id: str, + zone_id: int, +) -> None: with patch.object( zones[zone_id], "bypassed", @@ -117,7 +125,14 @@ async def test_local_setup( assert entity_registry.async_is_registered(SECOND_ENTITY_ID) -async def _check_local_state(hass, zones, bypassed, entity_id, zone_id, callback): +async def _check_local_state( + hass: HomeAssistant, + zones: dict[int, Any], + bypassed: bool, + entity_id: str, + zone_id: int, + callback: Callable, +) -> None: with patch.object( zones[zone_id], "bypassed", diff --git a/tests/components/roborock/test_config_flow.py b/tests/components/roborock/test_config_flow.py index a5a86e44372..39d8117847c 100644 --- a/tests/components/roborock/test_config_flow.py +++ b/tests/components/roborock/test_config_flow.py @@ -4,6 +4,7 @@ from copy import deepcopy from unittest.mock import patch import pytest +from roborock import RoborockTooFrequentCodeRequests from roborock.exceptions import ( RoborockAccountDoesNotExist, RoborockException, @@ -71,6 +72,7 @@ async def test_config_flow_success( (RoborockException(), {"base": "unknown_roborock"}), (RoborockAccountDoesNotExist(), {"base": "invalid_email"}), (RoborockInvalidEmail(), {"base": "invalid_email_format"}), + (RoborockTooFrequentCodeRequests(), {"base": "too_frequent_code_requests"}), (RoborockUrlException(), {"base": "unknown_url"}), (Exception(), {"base": "unknown"}), 
], diff --git a/tests/components/roon/test_config_flow.py b/tests/components/roon/test_config_flow.py index 9822c88fa48..9539a9c0f5b 100644 --- a/tests/components/roon/test_config_flow.py +++ b/tests/components/roon/test_config_flow.py @@ -48,7 +48,7 @@ class RoonApiMockException(RoonApiMock): @property def token(self): """Throw exception.""" - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 class RoonDiscoveryMock: diff --git a/tests/components/rpi_power/test_binary_sensor.py b/tests/components/rpi_power/test_binary_sensor.py index 1643df6c993..865d7c035b8 100644 --- a/tests/components/rpi_power/test_binary_sensor.py +++ b/tests/components/rpi_power/test_binary_sensor.py @@ -24,7 +24,7 @@ ENTITY_ID = "binary_sensor.rpi_power_status" MODULE = "homeassistant.components.rpi_power.binary_sensor.new_under_voltage" -async def _async_setup_component(hass, detected): +async def _async_setup_component(hass: HomeAssistant, detected: bool) -> MagicMock: mocked_under_voltage = MagicMock() type(mocked_under_voltage).get = MagicMock(return_value=detected) entry = MockConfigEntry(domain=DOMAIN) diff --git a/tests/components/ruckus_unleashed/test_config_flow.py b/tests/components/ruckus_unleashed/test_config_flow.py index 5bfe2d941d5..89bd72d99e4 100644 --- a/tests/components/ruckus_unleashed/test_config_flow.py +++ b/tests/components/ruckus_unleashed/test_config_flow.py @@ -83,15 +83,7 @@ async def test_form_user_reauth(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -121,15 +113,7 @@ async def test_form_user_reauth_different_unique_id(hass: HomeAssistant) -> None entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -161,15 +145,7 @@ async def test_form_user_reauth_invalid_auth(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -201,15 +177,7 @@ async def test_form_user_reauth_cannot_connect(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -241,15 +209,7 @@ async def test_form_user_reauth_general_exception(hass: HomeAssistant) -> None: entry = mock_config_entry() entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - 
"source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/russound_rio/const.py b/tests/components/russound_rio/const.py index d1f6aa7eead..527f4fe3377 100644 --- a/tests/components/russound_rio/const.py +++ b/tests/components/russound_rio/const.py @@ -12,5 +12,5 @@ MOCK_CONFIG = { "port": PORT, } -_CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) +_CONTROLLER = namedtuple("Controller", ["mac_address", "controller_type"]) # noqa: PYI024 MOCK_CONTROLLERS = {1: _CONTROLLER(mac_address=HARDWARE_MAC, controller_type=MODEL)} diff --git a/tests/components/rympro/test_config_flow.py b/tests/components/rympro/test_config_flow.py index e92b7c23357..7770889bdeb 100644 --- a/tests/components/rympro/test_config_flow.py +++ b/tests/components/rympro/test_config_flow.py @@ -160,17 +160,10 @@ async def test_form_already_exists(hass: HomeAssistant, config_entry) -> None: assert result2["reason"] == "already_configured" -async def test_form_reauth(hass: HomeAssistant, config_entry) -> None: +async def test_form_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test reauthentication.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -203,17 +196,12 @@ async def test_form_reauth(hass: HomeAssistant, config_entry) -> None: assert len(mock_setup_entry.mock_calls) == 1 -async def test_form_reauth_with_new_account(hass: HomeAssistant, config_entry) -> None: +async def test_form_reauth_with_new_account( + hass: HomeAssistant, config_entry: MockConfigEntry +) -> None: """Test reauthentication with new account.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/samsungtv/conftest.py b/tests/components/samsungtv/conftest.py index 752bce3b960..ec12031ef96 100644 --- a/tests/components/samsungtv/conftest.py +++ b/tests/components/samsungtv/conftest.py @@ -179,7 +179,7 @@ def rest_api_fixture_non_ssl_only() -> Mock: class MockSamsungTVAsyncRest: """Mock for a MockSamsungTVAsyncRest.""" - def __init__(self, host, session, port, timeout): + def __init__(self, host, session, port, timeout) -> None: """Mock a MockSamsungTVAsyncRest.""" self.port = port self.host = host diff --git a/tests/components/samsungtv/snapshots/test_init.ambr b/tests/components/samsungtv/snapshots/test_init.ambr index 42a3f4fb396..061b5bc1836 100644 --- a/tests/components/samsungtv/snapshots/test_init.ambr +++ b/tests/components/samsungtv/snapshots/test_init.ambr @@ -30,8 +30,10 @@ }), 'manufacturer': None, 'model': '82GXARRS', + 'model_id': None, 'name': 'fake', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, @@ -50,6 +52,10 @@ 'mac', 'aa:bb:cc:dd:ee:ff', ), + tuple( + 'mac', + 'none', + ), }), 'disabled_by': None, 'entry_type': None, @@ -66,8 
+72,10 @@ }), 'manufacturer': None, 'model': '82GXARRS', + 'model_id': None, 'name': 'fake', 'name_by_user': None, + 'primary_config_entry': , 'serial_number': None, 'suggested_area': None, 'sw_version': None, diff --git a/tests/components/samsungtv/test_config_flow.py b/tests/components/samsungtv/test_config_flow.py index 6c325ae3b04..43d8c81d000 100644 --- a/tests/components/samsungtv/test_config_flow.py +++ b/tests/components/samsungtv/test_config_flow.py @@ -1749,11 +1749,7 @@ async def test_form_reauth_legacy(hass: HomeAssistant) -> None: """Test reauthenticate legacy.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_OLD_ENTRY) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1773,11 +1769,7 @@ async def test_form_reauth_websocket(hass: HomeAssistant) -> None: entry.add_to_hass(hass) assert entry.state is ConfigEntryState.NOT_LOADED - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1798,11 +1790,7 @@ async def test_form_reauth_websocket_cannot_connect( """Test reauthenticate websocket when we cannot connect on the first attempt.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_ENTRYDATA_WS) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1830,11 +1818,7 @@ async def test_form_reauth_websocket_not_supported(hass: HomeAssistant) -> None: """Test reauthenticate websocket when the device is not supported.""" entry = MockConfigEntry(domain=DOMAIN, data=MOCK_ENTRYDATA_WS) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -1863,11 +1847,7 @@ async def test_form_reauth_encrypted(hass: HomeAssistant) -> None: entry.add_to_hass(hass) assert entry.state is ConfigEntryState.NOT_LOADED - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"entry_id": entry.entry_id, "source": config_entries.SOURCE_REAUTH}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/scene/common.py b/tests/components/scene/common.py index e20da63c402..39f86818744 100644 --- a/tests/components/scene/common.py +++ b/tests/components/scene/common.py @@ -6,11 +6,12 @@ components. Instead call the service directly. 
from homeassistant.components.scene import DOMAIN from homeassistant.const import ATTR_ENTITY_ID, ENTITY_MATCH_ALL, SERVICE_TURN_ON +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def activate(hass, entity_id=ENTITY_MATCH_ALL): +def activate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Activate a scene.""" data = {} diff --git a/tests/components/scene/test_init.py b/tests/components/scene/test_init.py index 5afdebda9da..3747610298d 100644 --- a/tests/components/scene/test_init.py +++ b/tests/components/scene/test_init.py @@ -222,7 +222,7 @@ async def test_restore_state_does_not_restore_unavailable( assert hass.states.get("scene.test").state == STATE_UNKNOWN -async def activate(hass, entity_id=ENTITY_MATCH_ALL): +async def activate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Activate a scene.""" data = {} @@ -241,7 +241,9 @@ async def test_services_registered(hass: HomeAssistant) -> None: assert hass.services.has_service("scene", "apply") -async def setup_lights(hass, entities): +async def setup_lights( + hass: HomeAssistant, entities: list[MockLight] +) -> tuple[MockLight, MockLight]: """Set up the light component.""" assert await async_setup_component( hass, light.DOMAIN, {light.DOMAIN: {"platform": "test"}} @@ -261,7 +263,7 @@ async def setup_lights(hass, entities): return light_1, light_2 -async def turn_off_lights(hass, entity_ids): +async def turn_off_lights(hass: HomeAssistant, entity_ids: list[str]) -> None: """Turn lights off.""" await hass.services.async_call( "light", diff --git a/tests/components/sense/test_config_flow.py b/tests/components/sense/test_config_flow.py index e564603ea87..0ba8d94e17b 100644 --- a/tests/components/sense/test_config_flow.py +++ b/tests/components/sense/test_config_flow.py @@ -268,9 +268,7 @@ async def test_reauth_no_form(hass: HomeAssistant, mock_sense) -> None: "homeassistant.config_entries.ConfigEntries.async_reload", return_value=True, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=MOCK_CONFIG - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" @@ -288,9 +286,7 @@ async def test_reauth_password(hass: HomeAssistant, mock_sense) -> None: mock_sense.return_value.authenticate.side_effect = SenseAuthenticationException # Reauth success without user input - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data=entry.data - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM mock_sense.return_value.authenticate.side_effect = None diff --git a/tests/components/sensibo/snapshots/test_diagnostics.ambr b/tests/components/sensibo/snapshots/test_diagnostics.ambr index a33209f7c88..cc77318239e 100644 --- a/tests/components/sensibo/snapshots/test_diagnostics.ambr +++ b/tests/components/sensibo/snapshots/test_diagnostics.ambr @@ -91,7 +91,8 @@ 'motion_sensors': dict({ }), 'name': 'Kitchen', - 'pm25': 1, + 'pm25': None, + 'pm25_pure': 1, 'pure_ac_integration': False, 'pure_boost_enabled': False, 'pure_conf': dict({ @@ -424,6 +425,7 @@ }), 'name': 'Hallway', 'pm25': None, + 'pm25_pure': None, 'pure_ac_integration': None, 'pure_boost_enabled': None, 'pure_conf': dict({ @@ -550,7 +552,8 @@ 'motion_sensors': dict({ }), 'name': 'Bedroom', - 'pm25': 1, + 'pm25': None, + 'pm25_pure': 1, 
'pure_ac_integration': False, 'pure_boost_enabled': False, 'pure_conf': dict({ diff --git a/tests/components/sensibo/snapshots/test_sensor.ambr b/tests/components/sensibo/snapshots/test_sensor.ambr index d645bdbd383..cd8d510b6cc 100644 --- a/tests/components/sensibo/snapshots/test_sensor.ambr +++ b/tests/components/sensibo/snapshots/test_sensor.ambr @@ -1,10 +1,13 @@ # serializer version: 1 # name: test_sensor ReadOnlyDict({ - 'device_class': 'pm25', - 'friendly_name': 'Kitchen PM2.5', - 'state_class': , - 'unit_of_measurement': 'µg/m³', + 'device_class': 'enum', + 'friendly_name': 'Kitchen Pure AQI', + 'options': list([ + 'good', + 'moderate', + 'bad', + ]), }) # --- # name: test_sensor.1 diff --git a/tests/components/sensibo/test_config_flow.py b/tests/components/sensibo/test_config_flow.py index e994402b09f..3f53495f0f2 100644 --- a/tests/components/sensibo/test_config_flow.py +++ b/tests/components/sensibo/test_config_flow.py @@ -192,15 +192,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -254,15 +246,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.sensibo.util.SensiboClient.async_get_devices", @@ -338,15 +322,7 @@ async def test_flow_reauth_no_username_or_device( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/sensibo/test_sensor.py b/tests/components/sensibo/test_sensor.py index 3c6fb584a6e..5fc761f178a 100644 --- a/tests/components/sensibo/test_sensor.py +++ b/tests/components/sensibo/test_sensor.py @@ -5,7 +5,7 @@ from __future__ import annotations from datetime import timedelta from unittest.mock import patch -from pysensibo.model import SensiboData +from pysensibo.model import PureAQI, SensiboData import pytest from syrupy.assertion import SnapshotAssertion @@ -27,17 +27,17 @@ async def test_sensor( """Test the Sensibo sensor.""" state1 = hass.states.get("sensor.hallway_motion_sensor_battery_voltage") - state2 = hass.states.get("sensor.kitchen_pm2_5") + state2 = hass.states.get("sensor.kitchen_pure_aqi") state3 = hass.states.get("sensor.kitchen_pure_sensitivity") state4 = hass.states.get("sensor.hallway_climate_react_low_temperature_threshold") assert state1.state == "3000" - assert state2.state == "1" + assert state2.state == "good" assert state3.state == "n" assert state4.state == "0.0" assert state2.attributes == snapshot assert state4.attributes == snapshot - monkeypatch.setattr(get_data.parsed["AAZZAAZZ"], "pm25", 2) + monkeypatch.setattr(get_data.parsed["AAZZAAZZ"], "pm25_pure", PureAQI(2)) with patch( 
"homeassistant.components.sensibo.coordinator.SensiboClient.async_get_devices_data", @@ -49,5 +49,5 @@ async def test_sensor( ) await hass.async_block_till_done() - state1 = hass.states.get("sensor.kitchen_pm2_5") - assert state1.state == "2" + state1 = hass.states.get("sensor.kitchen_pure_aqi") + assert state1.state == "moderate" diff --git a/tests/components/sensor/common.py b/tests/components/sensor/common.py index 53a93b73da3..458009b2690 100644 --- a/tests/components/sensor/common.py +++ b/tests/components/sensor/common.py @@ -10,11 +10,11 @@ from homeassistant.const import ( CONCENTRATION_PARTS_PER_MILLION, LIGHT_LUX, PERCENTAGE, - POWER_VOLT_AMPERE_REACTIVE, SIGNAL_STRENGTH_DECIBELS, UnitOfApparentPower, UnitOfFrequency, UnitOfPressure, + UnitOfReactivePower, UnitOfVolume, ) @@ -44,7 +44,7 @@ UNITS_OF_MEASUREMENT = { SensorDeviceClass.ENERGY: "kWh", # energy (Wh/kWh/MWh) SensorDeviceClass.FREQUENCY: UnitOfFrequency.GIGAHERTZ, # energy (Hz/kHz/MHz/GHz) SensorDeviceClass.POWER_FACTOR: PERCENTAGE, # power factor (no unit, min: -1.0, max: 1.0) - SensorDeviceClass.REACTIVE_POWER: POWER_VOLT_AMPERE_REACTIVE, # reactive power (var) + SensorDeviceClass.REACTIVE_POWER: UnitOfReactivePower.VOLT_AMPERE_REACTIVE, # reactive power (var) SensorDeviceClass.VOLATILE_ORGANIC_COMPOUNDS: CONCENTRATION_MICROGRAMS_PER_CUBIC_METER, # µg/m³ of vocs SensorDeviceClass.VOLTAGE: "V", # voltage (V) SensorDeviceClass.GAS: UnitOfVolume.CUBIC_METERS, # gas (m³) diff --git a/tests/components/sensor/test_recorder.py b/tests/components/sensor/test_recorder.py index 27fab9c0b3b..4d271785114 100644 --- a/tests/components/sensor/test_recorder.py +++ b/tests/components/sensor/test_recorder.py @@ -5104,7 +5104,9 @@ async def async_record_meter_state( return states -async def async_record_states_partially_unavailable(hass, zero, entity_id, attributes): +async def async_record_states_partially_unavailable( + hass: HomeAssistant, zero: datetime, entity_id: str, attributes: dict[str, Any] +) -> tuple[datetime, dict[str, list[State]]]: """Record some test states. We inject a bunch of state updates temperature sensors. 
diff --git a/tests/components/sensor/test_recorder_missing_stats.py b/tests/components/sensor/test_recorder_missing_stats.py index d770c459426..43e18b89e72 100644 --- a/tests/components/sensor/test_recorder_missing_stats.py +++ b/tests/components/sensor/test_recorder_missing_stats.py @@ -1,7 +1,6 @@ """The tests for sensor recorder platform can catch up.""" from datetime import datetime, timedelta -from pathlib import Path import threading from unittest.mock import patch @@ -17,11 +16,15 @@ from homeassistant.components.recorder.statistics import ( from homeassistant.components.recorder.util import session_scope from homeassistant.core import CoreState from homeassistant.helpers import recorder as recorder_helper -from homeassistant.setup import setup_component +from homeassistant.setup import async_setup_component import homeassistant.util.dt as dt_util -from tests.common import get_test_home_assistant -from tests.components.recorder.common import do_adhoc_statistics, wait_recording_done +from tests.common import async_test_home_assistant +from tests.components.recorder.common import ( + async_wait_recording_done, + do_adhoc_statistics, +) +from tests.typing import RecorderInstanceGenerator POWER_SENSOR_ATTRIBUTES = { "device_class": "energy", @@ -40,37 +43,34 @@ def disable_db_issue_creation(): @pytest.mark.timeout(25) -def test_compile_missing_statistics( - freezer: FrozenDateTimeFactory, recorder_db_url: str, tmp_path: Path +@pytest.mark.parametrize("persistent_database", [True]) +@pytest.mark.parametrize("enable_missing_statistics", [True]) +@pytest.mark.usefixtures("hass_storage") # Prevent test hass from writing to storage +async def test_compile_missing_statistics( + async_test_recorder: RecorderInstanceGenerator, freezer: FrozenDateTimeFactory ) -> None: """Test compile missing statistics.""" - if recorder_db_url == "sqlite://": - # On-disk database because we need to stop and start hass - # and have it persist. 
- recorder_db_url = "sqlite:///" + str(tmp_path / "pytest.db") - config = { - "db_url": recorder_db_url, - } three_days_ago = datetime(2021, 1, 1, 0, 0, 0, tzinfo=dt_util.UTC) start_time = three_days_ago + timedelta(days=3) freezer.move_to(three_days_ago) - with get_test_home_assistant() as hass: - hass.set_state(CoreState.not_running) + async with ( + async_test_home_assistant(initial_state=CoreState.not_running) as hass, + async_test_recorder(hass, wait_recorder=False), + ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "sensor", {}) - setup_component(hass, "recorder", {"recorder": config}) + await async_setup_component(hass, "sensor", {}) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) - hass.states.set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) - wait_recording_done(hass) + hass.states.async_set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) + await async_wait_recording_done(hass) two_days_ago = three_days_ago + timedelta(days=1) freezer.move_to(two_days_ago) do_adhoc_statistics(hass, start=two_days_ago) - wait_recording_done(hass) + await async_wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: latest = get_latest_short_term_statistics_with_session( hass, session, {"sensor.test1"}, {"state", "sum"} @@ -82,29 +82,32 @@ def test_compile_missing_statistics( past_time = two_days_ago while past_time <= start_time: freezer.move_to(past_time) - hass.states.set("sensor.test1", str(count), POWER_SENSOR_ATTRIBUTES) + hass.states.async_set("sensor.test1", str(count), POWER_SENSOR_ATTRIBUTES) past_time += timedelta(minutes=5) count += 1 - wait_recording_done(hass) + await async_wait_recording_done(hass) states = get_significant_states( hass, three_days_ago, past_time, ["sensor.test1"] ) assert len(states["sensor.test1"]) == 577 - hass.stop() + await hass.async_stop() + await hass.async_block_till_done() + freezer.move_to(start_time) - with get_test_home_assistant() as hass: - hass.set_state(CoreState.not_running) + async with ( + async_test_home_assistant(initial_state=CoreState.not_running) as hass, + async_test_recorder(hass, wait_recorder=False), + ): recorder_helper.async_initialize_recorder(hass) - setup_component(hass, "sensor", {}) - hass.states.set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) - setup_component(hass, "recorder", {"recorder": config}) + await async_setup_component(hass, "sensor", {}) + hass.states.async_set("sensor.test1", "0", POWER_SENSOR_ATTRIBUTES) get_instance(hass).recorder_and_worker_thread_ids.add(threading.get_ident()) - hass.start() - wait_recording_done(hass) - wait_recording_done(hass) + await hass.async_start() + await async_wait_recording_done(hass) + await async_wait_recording_done(hass) with session_scope(hass=hass, read_only=True) as session: latest = get_latest_short_term_statistics_with_session( hass, session, {"sensor.test1"}, {"state", "sum", "max", "mean", "min"} @@ -128,4 +131,4 @@ def test_compile_missing_statistics( assert len(stats["sensor.test1"]) == 48 # Make sure the last mean is 570.5 assert stats["sensor.test1"][-1]["mean"] == 570.5 - hass.stop() + await hass.async_stop() diff --git a/tests/components/sentry/conftest.py b/tests/components/sentry/conftest.py index 781250b2753..663f8ee6aa6 100644 --- a/tests/components/sentry/conftest.py +++ b/tests/components/sentry/conftest.py @@ 
-6,7 +6,7 @@ from typing import Any import pytest -from homeassistant.components.sentry import DOMAIN +from homeassistant.components.sentry.const import DOMAIN from tests.common import MockConfigEntry diff --git a/tests/components/sfr_box/test_config_flow.py b/tests/components/sfr_box/test_config_flow.py index 08c12e9817b..6bf610de661 100644 --- a/tests/components/sfr_box/test_config_flow.py +++ b/tests/components/sfr_box/test_config_flow.py @@ -207,15 +207,7 @@ async def test_reauth(hass: HomeAssistant, config_entry_with_auth: ConfigEntry) """Test the start of the config flow.""" assert len(hass.config_entries.async_entries(DOMAIN)) == 1 - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": config_entry_with_auth.entry_id, - "unique_id": config_entry_with_auth.unique_id, - }, - data=config_entry_with_auth.data, - ) + result = await config_entry_with_auth.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} diff --git a/tests/components/sharkiq/test_config_flow.py b/tests/components/sharkiq/test_config_flow.py index cf75bff1686..22a77678c0d 100644 --- a/tests/components/sharkiq/test_config_flow.py +++ b/tests/components/sharkiq/test_config_flow.py @@ -96,18 +96,18 @@ async def test_form_error(hass: HomeAssistant, exc: Exception, base_error: str) async def test_reauth_success(hass: HomeAssistant) -> None: """Test reauth flow.""" - with patch("sharkiq.AylaApi.async_sign_in", return_value=True): - mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) - mock_config.add_to_hass(hass) + mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) + mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, - data=CONFIG, + result = await mock_config.start_reauth_flow(hass) + + with patch("sharkiq.AylaApi.async_sign_in", return_value=True): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=CONFIG ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "reauth_successful" + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" @pytest.mark.parametrize( @@ -127,13 +127,15 @@ async def test_reauth( msg: str, ) -> None: """Test reauth failures.""" - with patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "unique_id": UNIQUE_ID}, - data=CONFIG, - ) + mock_config = MockConfigEntry(domain=DOMAIN, unique_id=UNIQUE_ID, data=CONFIG) + mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + + with patch("sharkiq.AylaApi.async_sign_in", side_effect=side_effect): + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input=CONFIG + ) msg_value = result[msg_field] if msg_field == "errors": msg_value = msg_value.get("base") diff --git a/tests/components/sharkiq/test_vacuum.py b/tests/components/sharkiq/test_vacuum.py index e5154008f56..3748cfd6dc4 100644 --- a/tests/components/sharkiq/test_vacuum.py +++ b/tests/components/sharkiq/test_vacuum.py @@ -141,7 +141,7 @@ class MockShark(SharkIqVacuum): @pytest.fixture(autouse=True) @patch("sharkiq.ayla_api.AylaApi", MockAyla) -async def setup_integration(hass): +async def setup_integration(hass: 
HomeAssistant) -> None: """Build the mock integration.""" entry = MockConfigEntry( domain=DOMAIN, unique_id=TEST_USERNAME, data=CONFIG, entry_id=ENTRY_ID diff --git a/tests/components/shelly/conftest.py b/tests/components/shelly/conftest.py index a2629d21362..a983cbbcda9 100644 --- a/tests/components/shelly/conftest.py +++ b/tests/components/shelly/conftest.py @@ -166,8 +166,20 @@ MOCK_BLOCKS = [ MOCK_CONFIG = { "input:0": {"id": 0, "name": "Test name input 0", "type": "button"}, - "input:1": {"id": 1, "type": "analog", "enable": True}, - "input:2": {"id": 2, "name": "Gas", "type": "count", "enable": True}, + "input:1": { + "id": 1, + "type": "analog", + "enable": True, + "xpercent": {"expr": None, "unit": None}, + }, + "input:2": { + "id": 2, + "name": "Gas", + "type": "count", + "enable": True, + "xcounts": {"expr": None, "unit": None}, + "xfreq": {"expr": None, "unit": None}, + }, "light:0": {"name": "test light_0"}, "light:1": {"name": "test light_1"}, "light:2": {"name": "test light_2"}, @@ -186,6 +198,7 @@ MOCK_CONFIG = { "device": {"name": "Test name"}, }, "wifi": {"sta": {"enable": True}, "sta1": {"enable": False}}, + "ws": {"enable": False, "server": None}, } MOCK_SHELLY_COAP = { diff --git a/tests/components/shelly/test_binary_sensor.py b/tests/components/shelly/test_binary_sensor.py index 18f65deb907..fadfe28db3e 100644 --- a/tests/components/shelly/test_binary_sensor.py +++ b/tests/components/shelly/test_binary_sensor.py @@ -169,9 +169,14 @@ async def test_block_restored_sleeping_binary_sensor( ) -> None: """Test block restored sleeping binary sensor.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_motion", "sensor_0-motion", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_motion", + "sensor_0-motion", + entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) monkeypatch.setattr(mock_block_device, "initialized", False) @@ -196,9 +201,14 @@ async def test_block_restored_sleeping_binary_sensor_no_last_state( ) -> None: """Test block restored sleeping binary sensor missing last state.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_motion", "sensor_0-motion", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_motion", + "sensor_0-motion", + entry, + device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) @@ -305,9 +315,14 @@ async def test_rpc_restored_sleeping_binary_sensor( ) -> None: """Test RPC restored binary sensor.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_cloud", + "cloud-cloud", + entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) @@ -334,9 +349,14 @@ async def test_rpc_restored_sleeping_binary_sensor_no_last_state( ) -> None: """Test RPC restored sleeping binary sensor missing last state.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - 
register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_cloud", + "cloud-cloud", + entry, + device_id=device.id, ) monkeypatch.setattr(mock_rpc_device, "initialized", False) diff --git a/tests/components/shelly/test_climate.py b/tests/components/shelly/test_climate.py index fea46b1d2d1..1156d7e0ed5 100644 --- a/tests/components/shelly/test_climate.py +++ b/tests/components/shelly/test_climate.py @@ -254,13 +254,14 @@ async def test_block_restored_climate( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) monkeypatch.delattr(mock_block_device.blocks[EMETER_BLOCK_ID], "targetTemp") entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) attrs = {"current_temperature": 20.5, "temperature": 4.0} extra_data = {"last_target_temp": 22.0} @@ -321,13 +322,14 @@ async def test_block_restored_climate_us_customery( monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) monkeypatch.delattr(mock_block_device.blocks[EMETER_BLOCK_ID], "targetTemp") entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) attrs = {"current_temperature": 67, "temperature": 39} extra_data = {"last_target_temp": 10.0} @@ -390,13 +392,14 @@ async def test_block_restored_climate_unavailable( monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_UNAVAILABLE)]) @@ -417,13 +420,14 @@ async def test_block_restored_climate_set_preset_before_online( monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, HVACMode.HEAT)]) @@ -518,13 +522,14 @@ async def test_block_restored_climate_auth_error( monkeypatch.delattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "targetTemp") monkeypatch.setattr(mock_block_device.blocks[DEVICE_BLOCK_ID], "valveError", 0) entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, CLIMATE_DOMAIN, "test_name", "sensor_0", entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, HVACMode.HEAT)]) diff --git a/tests/components/shelly/test_config_flow.py 
b/tests/components/shelly/test_config_flow.py index 0c574a33e0c..f03d90dbabb 100644 --- a/tests/components/shelly/test_config_flow.py +++ b/tests/components/shelly/test_config_flow.py @@ -4,7 +4,7 @@ from dataclasses import replace from datetime import timedelta from ipaddress import ip_address from typing import Any -from unittest.mock import AsyncMock, Mock, patch +from unittest.mock import AsyncMock, Mock, call, patch from aioshelly.const import DEFAULT_HTTP_PORT, MODEL_1, MODEL_PLUS_2PM from aioshelly.exceptions import ( @@ -23,7 +23,7 @@ from homeassistant.components.shelly.const import ( BLEScannerMode, ) from homeassistant.components.shelly.coordinator import ENTRY_RELOAD_COOLDOWN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE +from homeassistant.config_entries import SOURCE_RECONFIGURE from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.setup import async_setup_component @@ -819,20 +819,15 @@ async def test_reauth_successful( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", return_value={"mac": "test-mac", "type": MODEL_1, "auth": True, "gen": gen}, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, @@ -858,6 +853,9 @@ async def test_reauth_unsuccessful( domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": gen} ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -873,15 +871,6 @@ async def test_reauth_unsuccessful( new=AsyncMock(side_effect=InvalidAuthError), ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input=user_input, @@ -897,20 +886,14 @@ async def test_reauth_get_info_error(hass: HomeAssistant) -> None: domain="shelly", unique_id="test-mac", data={"host": "0.0.0.0", "gen": 2} ) entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch( "homeassistant.components.shelly.config_flow.get_info", side_effect=DeviceConnectionError, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={"password": "test2 password"}, @@ -1153,6 +1136,182 @@ async def test_zeroconf_sleeping_device_not_triggers_refresh( assert "device did not update" not in caplog.text +async def 
test_zeroconf_sleeping_device_attempts_configure(
+    hass: HomeAssistant,
+    mock_rpc_device: Mock,
+    monkeypatch: pytest.MonkeyPatch,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test zeroconf discovery configures a sleeping device's outbound websocket."""
+    monkeypatch.setattr(mock_rpc_device, "connected", False)
+    monkeypatch.setattr(mock_rpc_device, "initialized", False)
+    monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
+    entry = MockConfigEntry(
+        domain="shelly",
+        unique_id="AABBCCDDEEFF",
+        data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1},
+    )
+    entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(entry.entry_id)
+    await hass.async_block_till_done()
+    mock_rpc_device.mock_disconnected()
+    await hass.async_block_till_done()
+
+    mock_rpc_device.mock_online()
+    await hass.async_block_till_done(wait_background_tasks=True)
+
+    assert "online, resuming setup" in caplog.text
+    assert len(mock_rpc_device.initialize.mock_calls) == 1
+
+    with patch(
+        "homeassistant.components.shelly.config_flow.get_info",
+        return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False},
+    ):
+        result = await hass.config_entries.flow.async_init(
+            DOMAIN,
+            data=DISCOVERY_INFO,
+            context={"source": config_entries.SOURCE_ZEROCONF},
+        )
+        assert result["type"] is FlowResultType.ABORT
+        assert result["reason"] == "already_configured"
+
+    assert mock_rpc_device.update_outbound_websocket.mock_calls == []
+
+    monkeypatch.setattr(mock_rpc_device, "connected", True)
+    monkeypatch.setattr(mock_rpc_device, "initialized", True)
+    mock_rpc_device.mock_initialized()
+    async_fire_time_changed(
+        hass, dt_util.utcnow() + timedelta(seconds=ENTRY_RELOAD_COOLDOWN)
+    )
+    await hass.async_block_till_done()
+    assert "device did not update" not in caplog.text
+
+    monkeypatch.setattr(mock_rpc_device, "connected", False)
+    mock_rpc_device.mock_disconnected()
+    assert mock_rpc_device.update_outbound_websocket.mock_calls == [
+        call("ws://10.10.10.10:8123/api/shelly/ws")
+    ]
+
+
+async def test_zeroconf_sleeping_device_attempts_configure_ws_disabled(
+    hass: HomeAssistant,
+    mock_rpc_device: Mock,
+    monkeypatch: pytest.MonkeyPatch,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test zeroconf discovery configures a sleeping device's outbound websocket when it's disabled."""
+    monkeypatch.setattr(mock_rpc_device, "connected", False)
+    monkeypatch.setattr(mock_rpc_device, "initialized", False)
+    monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
+    monkeypatch.setitem(
+        mock_rpc_device.config, "ws", {"enable": False, "server": "ws://oldha"}
+    )
+    entry = MockConfigEntry(
+        domain="shelly",
+        unique_id="AABBCCDDEEFF",
+        data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1},
+    )
+    entry.add_to_hass(hass)
+    await hass.config_entries.async_setup(entry.entry_id)
+    await hass.async_block_till_done()
+    mock_rpc_device.mock_disconnected()
+    await hass.async_block_till_done()
+
+    mock_rpc_device.mock_online()
+    await hass.async_block_till_done(wait_background_tasks=True)
+
+    assert "online, resuming setup" in caplog.text
+    assert len(mock_rpc_device.initialize.mock_calls) == 1
+
+    with patch(
+        "homeassistant.components.shelly.config_flow.get_info",
+        return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False},
+    ):
+        result = await hass.config_entries.flow.async_init(
+            DOMAIN,
+            data=DISCOVERY_INFO,
+            context={"source": config_entries.SOURCE_ZEROCONF},
+        )
+        assert result["type"] is FlowResultType.ABORT
+        assert
result["reason"] == "already_configured" + + assert mock_rpc_device.update_outbound_websocket.mock_calls == [] + + monkeypatch.setattr(mock_rpc_device, "connected", True) + monkeypatch.setattr(mock_rpc_device, "initialized", True) + mock_rpc_device.mock_initialized() + async_fire_time_changed( + hass, dt_util.utcnow() + timedelta(seconds=ENTRY_RELOAD_COOLDOWN) + ) + await hass.async_block_till_done() + assert "device did not update" not in caplog.text + + monkeypatch.setattr(mock_rpc_device, "connected", False) + mock_rpc_device.mock_disconnected() + assert mock_rpc_device.update_outbound_websocket.mock_calls == [ + call("ws://10.10.10.10:8123/api/shelly/ws") + ] + + +async def test_zeroconf_sleeping_device_attempts_configure_no_url_available( + hass: HomeAssistant, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test zeroconf discovery for sleeping device with no hass url.""" + hass.config.internal_url = None + hass.config.external_url = None + hass.config.api = None + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setattr(mock_rpc_device, "initialized", False) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + entry = MockConfigEntry( + domain="shelly", + unique_id="AABBCCDDEEFF", + data={"host": "1.1.1.1", "gen": 2, "sleep_period": 1000, "model": MODEL_1}, + ) + entry.add_to_hass(hass) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + mock_rpc_device.mock_disconnected() + await hass.async_block_till_done() + + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + + assert "online, resuming setup" in caplog.text + assert len(mock_rpc_device.initialize.mock_calls) == 1 + + with patch( + "homeassistant.components.shelly.config_flow.get_info", + return_value={"mac": "AABBCCDDEEFF", "type": MODEL_1, "auth": False}, + ): + result = await hass.config_entries.flow.async_init( + DOMAIN, + data=DISCOVERY_INFO, + context={"source": config_entries.SOURCE_ZEROCONF}, + ) + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + assert mock_rpc_device.update_outbound_websocket.mock_calls == [] + + monkeypatch.setattr(mock_rpc_device, "connected", True) + monkeypatch.setattr(mock_rpc_device, "initialized", True) + mock_rpc_device.mock_initialized() + async_fire_time_changed( + hass, dt_util.utcnow() + timedelta(seconds=ENTRY_RELOAD_COOLDOWN) + ) + await hass.async_block_till_done() + assert "device did not update" not in caplog.text + + monkeypatch.setattr(mock_rpc_device, "connected", False) + mock_rpc_device.mock_disconnected() + # No url available so no attempt to configure the device + assert mock_rpc_device.update_outbound_websocket.mock_calls == [] + + async def test_sleeping_device_gen2_with_new_firmware( hass: HomeAssistant, mock_rpc_device: Mock, monkeypatch: pytest.MonkeyPatch ) -> None: diff --git a/tests/components/shelly/test_coordinator.py b/tests/components/shelly/test_coordinator.py index 1140c93775b..47c338e3fad 100644 --- a/tests/components/shelly/test_coordinator.py +++ b/tests/components/shelly/test_coordinator.py @@ -16,6 +16,7 @@ from homeassistant.components.shelly.const import ( ATTR_DEVICE, ATTR_GENERATION, CONF_BLE_SCANNER_MODE, + CONF_SLEEP_PERIOD, DOMAIN, ENTRY_RELOAD_COOLDOWN, MAX_PUSH_UPDATE_FAILURES, @@ -677,7 +678,7 @@ async def test_rpc_polling_auth_error( monkeypatch.setattr( mock_rpc_device, - "update_status", + "poll", 
AsyncMock( side_effect=InvalidAuthError, ), @@ -767,7 +768,7 @@ async def test_rpc_polling_connection_error( monkeypatch.setattr( mock_rpc_device, - "update_status", + "poll", AsyncMock( side_effect=DeviceConnectionError, ), @@ -886,9 +887,14 @@ async def test_block_sleeping_device_connection_error( """Test block sleeping device connection error during initialize.""" sleep_period = 1000 entry = await init_integration(hass, 1, sleep_period=sleep_period, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_motion", "sensor_0-motion", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_motion", + "sensor_0-motion", + entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) monkeypatch.setattr(mock_block_device, "initialized", False) @@ -931,9 +937,14 @@ async def test_rpc_sleeping_device_connection_error( """Test RPC sleeping device connection error during initialize.""" sleep_period = 1000 entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, BINARY_SENSOR_DOMAIN, "test_name_cloud", "cloud-cloud", entry + hass, + BINARY_SENSOR_DOMAIN, + "test_name_cloud", + "cloud-cloud", + entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_ON)]) monkeypatch.setattr(mock_rpc_device, "connected", False) @@ -966,6 +977,31 @@ async def test_rpc_sleeping_device_connection_error( assert get_entity_state(hass, entity_id) == STATE_UNAVAILABLE +async def test_rpc_sleeping_device_late_setup( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, +) -> None: + """Test RPC sleeping device creates entities if they do not exist yet.""" + entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) + monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000) + assert entry.data[CONF_SLEEP_PERIOD] == 1000 + register_device(device_registry, entry) + monkeypatch.setattr(mock_rpc_device, "connected", False) + monkeypatch.setattr(mock_rpc_device, "initialized", False) + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + + monkeypatch.setattr(mock_rpc_device, "initialized", True) + mock_rpc_device.mock_online() + await hass.async_block_till_done(wait_background_tasks=True) + monkeypatch.setattr(mock_rpc_device, "connected", True) + mock_rpc_device.mock_initialized() + await hass.async_block_till_done(wait_background_tasks=True) + assert hass.states.get("sensor.test_name_temperature") is not None + + async def test_rpc_already_connected( hass: HomeAssistant, freezer: FrozenDateTimeFactory, diff --git a/tests/components/shelly/test_diagnostics.py b/tests/components/shelly/test_diagnostics.py index 4fc8ea6ca8f..395c7ccfeaf 100644 --- a/tests/components/shelly/test_diagnostics.py +++ b/tests/components/shelly/test_diagnostics.py @@ -1,5 +1,6 @@ """Tests for Shelly diagnostics platform.""" +from copy import deepcopy from unittest.mock import ANY, Mock, PropertyMock from aioshelly.ble.const import BLE_SCAN_RESULT_EVENT @@ -151,7 +152,7 @@ async def test_rpc_config_entry_diagnostics( "model": MODEL_25, "sw_version": "some fw string", }, - "device_settings": {}, + "device_settings": {"ws_outbound_enabled": False}, "device_status": { "sys": { "available_updates": { @@ 
-164,3 +165,30 @@ async def test_rpc_config_entry_diagnostics(
         },
         "last_error": "DeviceConnectionError()",
     }
+
+
+@pytest.mark.parametrize(
+    ("ws_outbound_server", "ws_outbound_server_valid"),
+    [("ws://10.10.10.10:8123/api/shelly/ws", True), ("wrong_url", False)],
+)
+async def test_rpc_config_entry_diagnostics_ws_outbound(
+    hass: HomeAssistant,
+    hass_client: ClientSessionGenerator,
+    mock_rpc_device: Mock,
+    monkeypatch: pytest.MonkeyPatch,
+    ws_outbound_server: str,
+    ws_outbound_server_valid: bool,
+) -> None:
+    """Test config entry diagnostics for rpc device with websocket outbound."""
+    config = deepcopy(mock_rpc_device.config)
+    config["ws"] = {"enable": True, "server": ws_outbound_server}
+    monkeypatch.setattr(mock_rpc_device, "config", config)
+
+    entry = await init_integration(hass, 2, sleep_period=60)
+
+    result = await get_diagnostics_for_config_entry(hass, hass_client, entry)
+
+    assert (
+        result["device_settings"]["ws_outbound_server_valid"]
+        == ws_outbound_server_valid
+    )
diff --git a/tests/components/shelly/test_init.py b/tests/components/shelly/test_init.py
index 46698c23c0a..b5516485501 100644
--- a/tests/components/shelly/test_init.py
+++ b/tests/components/shelly/test_init.py
@@ -310,6 +310,52 @@ async def test_sleeping_rpc_device_online_new_firmware(
     assert entry.data["sleep_period"] == 1500
 
 
+async def test_sleeping_rpc_device_online_during_setup(
+    hass: HomeAssistant,
+    mock_rpc_device: Mock,
+    monkeypatch: pytest.MonkeyPatch,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test sleeping device Gen2 woken up by the user during setup."""
+    monkeypatch.setattr(mock_rpc_device, "connected", False)
+    monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
+    await init_integration(hass, 2, sleep_period=1000)
+    await hass.async_block_till_done(wait_background_tasks=True)
+
+    assert "will resume when device is online" in caplog.text
+    assert "is online (source: setup)" in caplog.text
+    assert hass.states.get("sensor.test_name_temperature") is not None
+
+
+async def test_sleeping_rpc_device_offline_during_setup(
+    hass: HomeAssistant,
+    mock_rpc_device: Mock,
+    monkeypatch: pytest.MonkeyPatch,
+    caplog: pytest.LogCaptureFixture,
+) -> None:
+    """Test sleeping device Gen2 that is offline during setup."""
+    monkeypatch.setattr(mock_rpc_device, "connected", False)
+    monkeypatch.setitem(mock_rpc_device.status["sys"], "wakeup_period", 1000)
+    monkeypatch.setattr(
+        mock_rpc_device, "initialize", AsyncMock(side_effect=DeviceConnectionError)
+    )
+
+    # Init integration, should fail since device is offline
+    await init_integration(hass, 2, sleep_period=1000)
+    await hass.async_block_till_done(wait_background_tasks=True)
+
+    assert "will resume when device is online" in caplog.text
+    assert "is online (source: setup)" in caplog.text
+    assert hass.states.get("sensor.test_name_temperature") is None
+
+    # Create an online event and verify that the device is initialized successfully
+    monkeypatch.setattr(mock_rpc_device, "initialize", AsyncMock())
+    mock_rpc_device.mock_online()
+    await hass.async_block_till_done(wait_background_tasks=True)
+
+    assert hass.states.get("sensor.test_name_temperature") is not None
+
+
 @pytest.mark.parametrize(
     ("gen", "entity_id"),
     [
diff --git a/tests/components/shelly/test_number.py b/tests/components/shelly/test_number.py
index 73f432094b9..6c1cc394b64 100644
--- a/tests/components/shelly/test_number.py
+++ b/tests/components/shelly/test_number.py
@@ -72,7 +72,7 @@ async def test_block_restored_number(
 ) -> None:
     """Test block restored
number.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) capabilities = { "min": 0, "max": 100, @@ -86,6 +86,7 @@ async def test_block_restored_number( "device_0-valvePos", entry, capabilities, + device_id=device.id, ) extra_data = { "native_max_value": 100, @@ -118,7 +119,7 @@ async def test_block_restored_number_no_last_state( ) -> None: """Test block restored number missing last state.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) capabilities = { "min": 0, "max": 100, @@ -132,6 +133,7 @@ async def test_block_restored_number_no_last_state( "device_0-valvePos", entry, capabilities, + device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/shelly/test_sensor.py b/tests/components/shelly/test_sensor.py index 2da82a5da87..ef8a609998a 100644 --- a/tests/components/shelly/test_sensor.py +++ b/tests/components/shelly/test_sensor.py @@ -25,8 +25,12 @@ from homeassistant.const import ( PERCENTAGE, STATE_UNAVAILABLE, STATE_UNKNOWN, + UnitOfElectricCurrent, + UnitOfElectricPotential, UnitOfEnergy, UnitOfFrequency, + UnitOfPower, + UnitOfTemperature, ) from homeassistant.core import HomeAssistant, State from homeassistant.helpers.device_registry import DeviceRegistry @@ -189,9 +193,14 @@ async def test_block_restored_sleeping_sensor( ) -> None: """Test block restored sleeping sensor.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, SENSOR_DOMAIN, "test_name_temperature", "sensor_0-temp", entry + hass, + SENSOR_DOMAIN, + "test_name_temperature", + "sensor_0-temp", + entry, + device_id=device.id, ) extra_data = {"native_value": "20.4", "native_unit_of_measurement": "°C"} @@ -222,9 +231,14 @@ async def test_block_restored_sleeping_sensor_no_last_state( ) -> None: """Test block restored sleeping sensor missing last state.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, SENSOR_DOMAIN, "test_name_temperature", "sensor_0-temp", entry + hass, + SENSOR_DOMAIN, + "test_name_temperature", + "sensor_0-temp", + entry, + device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) @@ -289,9 +303,14 @@ async def test_block_not_matched_restored_sleeping_sensor( ) -> None: """Test block not matched to restored sleeping sensor.""" entry = await init_integration(hass, 1, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( - hass, SENSOR_DOMAIN, "test_name_temperature", "sensor_0-temp", entry + hass, + SENSOR_DOMAIN, + "test_name_temperature", + "sensor_0-temp", + entry, + device_id=device.id, ) extra_data = {"native_value": "20.4", "native_unit_of_measurement": "°C"} @@ -485,13 +504,14 @@ async def test_rpc_restored_sleeping_sensor( ) -> None: """Test RPC restored sensor.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - 
register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, SENSOR_DOMAIN, "test_name_temperature", "temperature:0-temperature_0", entry, + device_id=device.id, ) extra_data = {"native_value": "21.0", "native_unit_of_measurement": "°C"} @@ -523,13 +543,14 @@ async def test_rpc_restored_sleeping_sensor_no_last_state( ) -> None: """Test RPC restored sensor missing last state.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, SENSOR_DOMAIN, "test_name_temperature", "temperature:0-temperature_0", entry, + device_id=device.id, ) monkeypatch.setattr(mock_rpc_device, "initialized", False) @@ -685,10 +706,27 @@ async def test_block_sleeping_update_entity_service( ) +@pytest.mark.parametrize( + ("original_unit", "expected_unit"), + [ + ("m/s", "m/s"), + (None, None), + ("", None), + ], +) async def test_rpc_analog_input_sensors( - hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry + hass: HomeAssistant, + mock_rpc_device: Mock, + entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, + original_unit: str | None, + expected_unit: str | None, ) -> None: """Test RPC analog input xpercent sensor.""" + config = deepcopy(mock_rpc_device.config) + config["input:1"]["xpercent"] = {"expr": "x*0.2995", "unit": original_unit} + monkeypatch.setattr(mock_rpc_device, "config", config) + await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.test_name_analog_input" @@ -699,7 +737,10 @@ async def test_rpc_analog_input_sensors( assert entry.unique_id == "123456789ABC-input:1-analoginput" entity_id = f"{SENSOR_DOMAIN}.test_name_analog_value" - assert hass.states.get(entity_id).state == "8.9" + state = hass.states.get(entity_id) + assert state + assert state.state == "8.9" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit entry = entity_registry.async_get(entity_id) assert entry @@ -743,13 +784,27 @@ async def test_rpc_disabled_xpercent( assert hass.states.get(entity_id) is None +@pytest.mark.parametrize( + ("original_unit", "expected_unit"), + [ + ("l/h", "l/h"), + (None, None), + ("", None), + ], +) async def test_rpc_pulse_counter_sensors( hass: HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, monkeypatch: pytest.MonkeyPatch, + original_unit: str | None, + expected_unit: str | None, ) -> None: """Test RPC counter sensor.""" + config = deepcopy(mock_rpc_device.config) + config["input:2"]["xcounts"] = {"expr": "x/10", "unit": original_unit} + monkeypatch.setattr(mock_rpc_device, "config", config) + await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter" @@ -763,7 +818,10 @@ async def test_rpc_pulse_counter_sensors( assert entry.unique_id == "123456789ABC-input:2-pulse_counter" entity_id = f"{SENSOR_DOMAIN}.gas_counter_value" - assert hass.states.get(entity_id).state == "561.74" + state = hass.states.get(entity_id) + assert state + assert state.state == "561.74" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit entry = entity_registry.async_get(entity_id) assert entry @@ -807,12 +865,27 @@ async def test_rpc_disabled_xtotal_counter( assert hass.states.get(entity_id) is None +@pytest.mark.parametrize( + ("original_unit", "expected_unit"), + [ + ("W", "W"), + (None, None), + ("", None), + ], +) async def test_rpc_pulse_counter_frequency_sensors( hass: 
HomeAssistant, mock_rpc_device: Mock, entity_registry: EntityRegistry, + monkeypatch: pytest.MonkeyPatch, + original_unit: str | None, + expected_unit: str | None, ) -> None: """Test RPC counter sensor.""" + config = deepcopy(mock_rpc_device.config) + config["input:2"]["xfreq"] = {"expr": "x**2", "unit": original_unit} + monkeypatch.setattr(mock_rpc_device, "config", config) + await init_integration(hass, 2) entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency" @@ -826,7 +899,10 @@ async def test_rpc_pulse_counter_frequency_sensors( assert entry.unique_id == "123456789ABC-input:2-counter_frequency" entity_id = f"{SENSOR_DOMAIN}.gas_pulse_counter_frequency_value" - assert hass.states.get(entity_id).state == "6.11" + state = hass.states.get(entity_id) + assert state + assert state.state == "6.11" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == expected_unit entry = entity_registry.async_get(entity_id) assert entry @@ -1191,6 +1267,92 @@ async def test_rpc_remove_enum_virtual_sensor_when_orphaned( assert not entry +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +@pytest.mark.parametrize("light_type", ["rgb", "rgbw"]) +async def test_rpc_rgbw_sensors( + hass: HomeAssistant, + entity_registry: EntityRegistry, + mock_rpc_device: Mock, + monkeypatch: pytest.MonkeyPatch, + light_type: str, +) -> None: + """Test sensors for RGB/RGBW light.""" + config = deepcopy(mock_rpc_device.config) + config[f"{light_type}:0"] = {"id": 0} + monkeypatch.setattr(mock_rpc_device, "config", config) + + status = deepcopy(mock_rpc_device.status) + status[f"{light_type}:0"] = { + "temperature": {"tC": 54.3, "tF": 129.7}, + "aenergy": {"total": 45.141}, + "apower": 12.2, + "current": 0.23, + "voltage": 12.4, + } + monkeypatch.setattr(mock_rpc_device, "status", status) + + await init_integration(hass, 2) + + entity_id = "sensor.test_name_power" + + state = hass.states.get(entity_id) + assert state + assert state.state == "12.2" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfPower.WATT + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-power_{light_type}" + + entity_id = "sensor.test_name_energy" + + state = hass.states.get(entity_id) + assert state + assert state.state == "0.045141" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfEnergy.KILO_WATT_HOUR + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-energy_{light_type}" + + entity_id = "sensor.test_name_current" + + state = hass.states.get(entity_id) + assert state + assert state.state == "0.23" + assert ( + state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricCurrent.AMPERE + ) + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-current_{light_type}" + + entity_id = "sensor.test_name_voltage" + + state = hass.states.get(entity_id) + assert state + assert state.state == "12.4" + assert ( + state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfElectricPotential.VOLT + ) + + entry = entity_registry.async_get(entity_id) + assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-voltage_{light_type}" + + entity_id = "sensor.test_name_device_temperature" + + state = hass.states.get(entity_id) + assert state + assert state.state == "54.3" + assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == UnitOfTemperature.CELSIUS + + entry = entity_registry.async_get(entity_id) + 
assert entry + assert entry.unique_id == f"123456789ABC-{light_type}:0-temperature_{light_type}" + + async def test_rpc_device_sensor_goes_unavailable_on_disconnect( hass: HomeAssistant, mock_rpc_device: Mock, diff --git a/tests/components/shelly/test_switch.py b/tests/components/shelly/test_switch.py index 124562be8d5..c891d1d7b2d 100644 --- a/tests/components/shelly/test_switch.py +++ b/tests/components/shelly/test_switch.py @@ -118,13 +118,14 @@ async def test_block_restored_motion_switch( entry = await init_integration( hass, 1, sleep_period=1000, model=model, skip_setup=True ) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, SWITCH_DOMAIN, "test_name_motion_detection", "sensor_0-motionActive", entry, + device_id=device.id, ) mock_restore_cache(hass, [State(entity_id, STATE_OFF)]) @@ -154,13 +155,14 @@ async def test_block_restored_motion_switch_no_last_state( entry = await init_integration( hass, 1, sleep_period=1000, model=model, skip_setup=True ) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, SWITCH_DOMAIN, "test_name_motion_detection", "sensor_0-motionActive", entry, + device_id=device.id, ) monkeypatch.setattr(mock_block_device, "initialized", False) await hass.config_entries.async_setup(entry.entry_id) diff --git a/tests/components/shelly/test_update.py b/tests/components/shelly/test_update.py index 721e86559a3..c6434c0b988 100644 --- a/tests/components/shelly/test_update.py +++ b/tests/components/shelly/test_update.py @@ -385,13 +385,14 @@ async def test_rpc_restored_sleeping_update( ) -> None: """Test RPC restored update entity.""" entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, UPDATE_DOMAIN, "test_name_firmware_update", "sys-fwupdate", entry, + device_id=device.id, ) attr = {ATTR_INSTALLED_VERSION: "1", ATTR_LATEST_VERSION: "2"} @@ -443,13 +444,14 @@ async def test_rpc_restored_sleeping_update_no_last_state( }, ) entry = await init_integration(hass, 2, sleep_period=1000, skip_setup=True) - register_device(device_registry, entry) + device = register_device(device_registry, entry) entity_id = register_entity( hass, UPDATE_DOMAIN, "test_name_firmware_update", "sys-fwupdate", entry, + device_id=device.id, ) monkeypatch.setattr(mock_rpc_device, "initialized", False) diff --git a/tests/components/sia/test_config_flow.py b/tests/components/sia/test_config_flow.py index 95de53d7fbe..b0d83855a25 100644 --- a/tests/components/sia/test_config_flow.py +++ b/tests/components/sia/test_config_flow.py @@ -1,5 +1,6 @@ """Test the sia config flow.""" +from collections.abc import Generator from unittest.mock import patch import pytest @@ -16,6 +17,7 @@ from homeassistant.components.sia.const import ( CONF_ZONES, DOMAIN, ) +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_PORT, CONF_PROTOCOL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -105,7 +107,7 @@ ADDITIONAL_OPTIONS = { @pytest.fixture -async def flow_at_user_step(hass): +async def flow_at_user_step(hass: HomeAssistant) -> ConfigFlowResult: """Return a initialized flow.""" return await hass.config_entries.flow.async_init( DOMAIN, @@ -114,7 +116,9 @@ async def flow_at_user_step(hass): @pytest.fixture -async def 
entry_with_basic_config(hass, flow_at_user_step): +async def entry_with_basic_config( + hass: HomeAssistant, flow_at_user_step: ConfigFlowResult +) -> ConfigFlowResult: """Return a entry with a basic config.""" with patch("homeassistant.components.sia.async_setup_entry", return_value=True): return await hass.config_entries.flow.async_configure( @@ -123,7 +127,9 @@ async def entry_with_basic_config(hass, flow_at_user_step): @pytest.fixture -async def flow_at_add_account_step(hass, flow_at_user_step): +async def flow_at_add_account_step( + hass: HomeAssistant, flow_at_user_step: ConfigFlowResult +) -> ConfigFlowResult: """Return a initialized flow at the additional account step.""" return await hass.config_entries.flow.async_configure( flow_at_user_step["flow_id"], BASIC_CONFIG_ADDITIONAL @@ -131,7 +137,9 @@ async def flow_at_add_account_step(hass, flow_at_user_step): @pytest.fixture -async def entry_with_additional_account_config(hass, flow_at_add_account_step): +async def entry_with_additional_account_config( + hass: HomeAssistant, flow_at_add_account_step: ConfigFlowResult +) -> ConfigFlowResult: """Return a entry with a two account config.""" with patch("homeassistant.components.sia.async_setup_entry", return_value=True): return await hass.config_entries.flow.async_configure( @@ -139,7 +147,7 @@ async def entry_with_additional_account_config(hass, flow_at_add_account_step): ) -async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry): +async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Add mock config to HASS.""" assert await async_setup_component(hass, DOMAIN, {}) config_entry.add_to_hass(hass) @@ -147,23 +155,21 @@ async def setup_sia(hass: HomeAssistant, config_entry: MockConfigEntry): await hass.async_block_till_done() -async def test_form_start_user(hass: HomeAssistant, flow_at_user_step) -> None: +async def test_form_start_user(flow_at_user_step: ConfigFlowResult) -> None: """Start the form and check if you get the right id and schema for the user step.""" assert flow_at_user_step["step_id"] == "user" assert flow_at_user_step["errors"] is None assert flow_at_user_step["data_schema"] == HUB_SCHEMA -async def test_form_start_account( - hass: HomeAssistant, flow_at_add_account_step -) -> None: +async def test_form_start_account(flow_at_add_account_step: ConfigFlowResult) -> None: """Start the form and check if you get the right id and schema for the additional account step.""" assert flow_at_add_account_step["step_id"] == "add_account" assert flow_at_add_account_step["errors"] is None assert flow_at_add_account_step["data_schema"] == ACCOUNT_SCHEMA -async def test_create(hass: HomeAssistant, entry_with_basic_config) -> None: +async def test_create(entry_with_basic_config: ConfigFlowResult) -> None: """Test we create a entry through the form.""" assert entry_with_basic_config["type"] is FlowResultType.CREATE_ENTRY assert ( @@ -175,7 +181,7 @@ async def test_create(hass: HomeAssistant, entry_with_basic_config) -> None: async def test_create_additional_account( - hass: HomeAssistant, entry_with_additional_account_config + entry_with_additional_account_config: ConfigFlowResult, ) -> None: """Test we create a config with two accounts.""" assert entry_with_additional_account_config["type"] is FlowResultType.CREATE_ENTRY @@ -210,7 +216,7 @@ async def test_abort_form(hass: HomeAssistant) -> None: @pytest.fixture(autouse=True) -def mock_sia(): +def mock_sia() -> Generator[None]: """Mock SIAClient.""" with 
patch("homeassistant.components.sia.hub.SIAClient", autospec=True): yield diff --git a/tests/components/simplefin/snapshots/test_binary_sensor.ambr b/tests/components/simplefin/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..be26ae1a03d --- /dev/null +++ b/tests/components/simplefin/snapshots/test_binary_sensor.ambr @@ -0,0 +1,769 @@ +# serializer version: 1 +# name: test_all_entities[binary_sensor.investments_dr_evil_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_dr_evil_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_dr_evil_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments Dr Evil Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_dr_evil_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_dr_evil_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_dr_evil_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-4k5l6m7n-8o9p-1q2r-3s4t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_dr_evil_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments Dr Evil Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_dr_evil_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_my_checking_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_my_checking_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_my_checking_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments My Checking Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_my_checking_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_my_checking_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_my_checking_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-1k2l3m4n-5o6p-7q8r-9s0t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_my_checking_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments My Checking Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_my_checking_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments NerdCorp Series B Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-5k6l7m8n-9o0p-1q2r-3s4t_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.investments_nerdcorp_series_b_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Investments NerdCorp Series B Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.investments_nerdcorp_series_b_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Mythical RandomSavings Castle Mortgage Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-7a8b9c0d-1e2f-3g4h-5i6j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_castle_mortgage_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 
'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Mythical RandomSavings Castle Mortgage Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.mythical_randomsavings_castle_mortgage_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Mythical RandomSavings Unicorn Pot Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-6a7b8c9d-0e1f-2g3h-4i5j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.mythical_randomsavings_unicorn_pot_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Mythical RandomSavings Unicorn Pot Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.mythical_randomsavings_unicorn_pot_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-3a4b5c6d-7e8f-9g0h-1i2j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.random_bank_costco_anywhere_visa_r_card_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'Random Bank Costco Anywhere Visa® Card Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.random_bank_costco_anywhere_visa_r_card_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'The Bank of Go PRIME SAVINGS Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_possible_error', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-2a3b4c5d-6e7f-8g9h-0i1j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_prime_savings_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'The Bank of Go PRIME SAVINGS Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.the_bank_of_go_prime_savings_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_possible_error-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_possible_error', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Possible error', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_possible_error-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'The Bank of Go The Bank Possible error', + }), + 'context': , + 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_possible_error', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_all_entities[binary_sensor.the_bank_of_go_the_bank_problem-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': , + 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_problem', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Problem', + 'platform': 'simplefin', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'possible_error', + 'unique_id': 'account_ACT-1a2b3c4d-5e6f-7g8h-9i0j_possible_error', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_all_entities[binary_sensor.the_bank_of_go_the_bank_problem-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'attribution': 'Data provided by SimpleFIN API', + 'device_class': 'problem', + 'friendly_name': 'The Bank of Go The Bank Problem', + }), + 'context': , + 'entity_id': 'binary_sensor.the_bank_of_go_the_bank_problem', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/simplefin/test_binary_sensor.py b/tests/components/simplefin/test_binary_sensor.py new file mode 100644 index 00000000000..40c6882153d --- /dev/null +++ b/tests/components/simplefin/test_binary_sensor.py @@ -0,0 +1,29 @@ +"""Test SimpleFin Sensor with Snapshot data.""" + +from unittest.mock import AsyncMock, patch + +from syrupy import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + + +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_simplefin_client: AsyncMock, +) -> None: + """Test all entities.""" + with patch( + "homeassistant.components.simplefin.PLATFORMS", [Platform.BINARY_SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/simplisafe/test_config_flow.py b/tests/components/simplisafe/test_config_flow.py index dde7e37b891..9270fc43c30 100644 --- a/tests/components/simplisafe/test_config_flow.py +++ b/tests/components/simplisafe/test_config_flow.py @@ -8,11 +8,13 @@ from simplipy.errors import InvalidCredentialsError, SimplipyError from homeassistant.components.simplisafe import DOMAIN from homeassistant.components.simplisafe.config_flow import CONF_AUTH_CODE -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_CODE, CONF_TOKEN, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + VALID_AUTH_CODE = "code12345123451234512345123451234512345123451" @@ -90,13 +92,11 @@ async def test_options_flow(config_entry, hass: HomeAssistant) -> None: assert config_entry.options == {CONF_CODE: "4321"} -async def test_step_reauth(config_entry, hass: HomeAssistant, setup_simplisafe) -> None: +async def test_step_reauth( + config_entry: MockConfigEntry, hass: HomeAssistant, setup_simplisafe +) -> None: """Test the re-auth step.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - data={CONF_USERNAME: "12345", CONF_TOKEN: "token123"}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "user" with ( @@ -118,14 +118,10 @@ async def test_step_reauth(config_entry, hass: HomeAssistant, setup_simplisafe) @pytest.mark.parametrize("unique_id", ["some_other_id"]) async def test_step_reauth_wrong_account( - config_entry, hass: HomeAssistant, setup_simplisafe + config_entry: MockConfigEntry, hass: HomeAssistant, setup_simplisafe ) -> None: """Test the re-auth step where the wrong account is used during login.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH}, - 
data={CONF_USERNAME: "12345", CONF_TOKEN: "token123"}, - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "user" with ( diff --git a/tests/components/siren/test_init.py b/tests/components/siren/test_init.py index 168300d0abe..475b32540b4 100644 --- a/tests/components/siren/test_init.py +++ b/tests/components/siren/test_init.py @@ -27,7 +27,7 @@ class MockSirenEntity(SirenEntity): supported_features=0, available_tones_as_attr=None, available_tones_in_desc=None, - ): + ) -> None: """Initialize mock siren entity.""" self._attr_supported_features = supported_features if available_tones_as_attr is not None: diff --git a/tests/components/skybell/test_config_flow.py b/tests/components/skybell/test_config_flow.py index cb62f808efc..f415fef077e 100644 --- a/tests/components/skybell/test_config_flow.py +++ b/tests/components/skybell/test_config_flow.py @@ -5,10 +5,9 @@ from unittest.mock import patch from aioskybell import exceptions import pytest -from homeassistant import config_entries from homeassistant.components.skybell.const import DOMAIN from homeassistant.config_entries import SOURCE_USER -from homeassistant.const import CONF_PASSWORD, CONF_SOURCE +from homeassistant.const import CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -104,15 +103,7 @@ async def test_step_reauth(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -130,15 +121,7 @@ async def test_step_reauth_failed(hass: HomeAssistant, skybell_mock) -> None: entry = MockConfigEntry(domain=DOMAIN, unique_id=USER_ID, data=CONF_DATA) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/sleepiq/test_config_flow.py b/tests/components/sleepiq/test_config_flow.py index af08f5aa9fe..26007d42e7d 100644 --- a/tests/components/sleepiq/test_config_flow.py +++ b/tests/components/sleepiq/test_config_flow.py @@ -101,19 +101,7 @@ async def test_reauth_password(hass: HomeAssistant) -> None: # set up initially entry = await setup_platform(hass) - with patch( - "homeassistant.components.sleepiq.config_flow.AsyncSleepIQ.login", - side_effect=SleepIQLoginException, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.sleepiq.config_flow.AsyncSleepIQ.login", diff --git a/tests/components/sma/conftest.py b/tests/components/sma/conftest.py index a98eda673e4..a54f478a31d 100644 --- a/tests/components/sma/conftest.py +++ b/tests/components/sma/conftest.py @@ -9,6 +9,7 @@ import pytest from homeassistant import config_entries from 
homeassistant.components.sma.const import DOMAIN +from homeassistant.core import HomeAssistant from . import MOCK_DEVICE, MOCK_USER_INPUT @@ -16,7 +17,7 @@ from tests.common import MockConfigEntry @pytest.fixture -def mock_config_entry(): +def mock_config_entry() -> MockConfigEntry: """Return the default mocked config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -28,7 +29,9 @@ def mock_config_entry(): @pytest.fixture -async def init_integration(hass, mock_config_entry): +async def init_integration( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> MockConfigEntry: """Create a fake SMA Config Entry.""" mock_config_entry.add_to_hass(hass) diff --git a/tests/components/smart_meter_texas/conftest.py b/tests/components/smart_meter_texas/conftest.py index d06571fe05e..14ba6199c3d 100644 --- a/tests/components/smart_meter_texas/conftest.py +++ b/tests/components/smart_meter_texas/conftest.py @@ -2,6 +2,7 @@ from http import HTTPStatus import json +from typing import Any import pytest from smart_meter_texas.const import ( @@ -19,9 +20,11 @@ from homeassistant.components.homeassistant import ( ) from homeassistant.components.smart_meter_texas.const import DOMAIN from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_fixture +from tests.test_util.aiohttp import AiohttpClientMocker TEST_ENTITY_ID = "sensor.electric_meter_123456789" @@ -32,14 +35,23 @@ def load_smt_fixture(name): return json.loads(json_fixture) -async def setup_integration(hass, config_entry, aioclient_mock, **kwargs): +async def setup_integration( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, + **kwargs: Any, +) -> None: """Initialize the Smart Meter Texas integration for testing.""" mock_connection(aioclient_mock, **kwargs) await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() -async def refresh_data(hass, config_entry, aioclient_mock): +async def refresh_data( + hass: HomeAssistant, + config_entry: MockConfigEntry, + aioclient_mock: AiohttpClientMocker, +) -> None: """Request a DataUpdateCoordinator refresh.""" mock_connection(aioclient_mock) await async_setup_component(hass, HA_DOMAIN, {}) @@ -91,7 +103,7 @@ def mock_connection( @pytest.fixture(name="config_entry") -def mock_config_entry(hass): +def mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Return a mock config entry.""" config_entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/smartthings/conftest.py b/tests/components/smartthings/conftest.py index 17e2c781989..70fd9db0744 100644 --- a/tests/components/smartthings/conftest.py +++ b/tests/components/smartthings/conftest.py @@ -91,7 +91,7 @@ async def setup_component( await async_setup_component(hass, "smartthings", {}) -def _create_location(): +def _create_location() -> Mock: loc = Mock(Location) loc.name = "Test Location" loc.location_id = str(uuid4()) @@ -99,19 +99,19 @@ def _create_location(): @pytest.fixture(name="location") -def location_fixture(): +def location_fixture() -> Mock: """Fixture for a single location.""" return _create_location() @pytest.fixture(name="locations") -def locations_fixture(location): +def locations_fixture(location: Mock) -> list[Mock]: """Fixture for 2 locations.""" return [location, _create_location()] @pytest.fixture(name="app") -async def app_fixture(hass, config_file): +async def app_fixture(hass: 
HomeAssistant, config_file: dict[str, str]) -> Mock: """Fixture for a single app.""" app = Mock(AppEntity) app.app_name = APP_NAME_PREFIX + str(uuid4()) @@ -133,7 +133,7 @@ async def app_fixture(hass, config_file): @pytest.fixture(name="app_oauth_client") -def app_oauth_client_fixture(): +def app_oauth_client_fixture() -> Mock: """Fixture for a single app's oauth.""" client = Mock(AppOAuthClient) client.client_id = str(uuid4()) @@ -150,7 +150,7 @@ def app_settings_fixture(app, config_file): return settings -def _create_installed_app(location_id, app_id): +def _create_installed_app(location_id: str, app_id: str) -> Mock: item = Mock(InstalledApp) item.installed_app_id = str(uuid4()) item.installed_app_status = InstalledAppStatus.AUTHORIZED @@ -161,7 +161,7 @@ def _create_installed_app(location_id, app_id): @pytest.fixture(name="installed_app") -def installed_app_fixture(location, app): +def installed_app_fixture(location: Mock, app: Mock) -> Mock: """Fixture for a single installed app.""" return _create_installed_app(location.location_id, app.app_id) @@ -222,7 +222,7 @@ def device_fixture(location): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, installed_app, location): +def config_entry_fixture(installed_app: Mock, location: Mock) -> MockConfigEntry: """Fixture representing a config entry.""" data = { CONF_ACCESS_TOKEN: str(uuid4()), diff --git a/tests/components/smartthings/test_init.py b/tests/components/smartthings/test_init.py index ae8a288e3a5..fa30fa258cf 100644 --- a/tests/components/smartthings/test_init.py +++ b/tests/components/smartthings/test_init.py @@ -1,6 +1,9 @@ """Tests for the SmartThings component init module.""" +from collections.abc import Callable, Coroutine +from datetime import datetime, timedelta from http import HTTPStatus +from typing import Any from unittest.mock import Mock, patch from uuid import uuid4 @@ -419,7 +422,11 @@ async def test_broker_regenerates_token(hass: HomeAssistant, config_entry) -> No stored_action = None config_entry.add_to_hass(hass) - def async_track_time_interval(hass, action, interval): + def async_track_time_interval( + hass: HomeAssistant, + action: Callable[[datetime], Coroutine[Any, Any, None] | None], + interval: timedelta, + ) -> None: nonlocal stored_action stored_action = action diff --git a/tests/components/smarttub/conftest.py b/tests/components/smarttub/conftest.py index c05762a903d..06780f8fb1e 100644 --- a/tests/components/smarttub/conftest.py +++ b/tests/components/smarttub/conftest.py @@ -1,5 +1,6 @@ """Common fixtures for smarttub tests.""" +from typing import Any from unittest.mock import create_autospec, patch import pytest @@ -7,19 +8,20 @@ import smarttub from homeassistant.components.smarttub.const import DOMAIN from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @pytest.fixture -def config_data(): +def config_data() -> dict[str, Any]: """Provide configuration data for tests.""" return {CONF_EMAIL: "test-email", CONF_PASSWORD: "test-password"} @pytest.fixture -def config_entry(config_data): +def config_entry(config_data: dict[str, Any]) -> MockConfigEntry: """Create a mock config entry.""" return MockConfigEntry( domain=DOMAIN, @@ -29,7 +31,7 @@ def config_entry(config_data): @pytest.fixture -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Set up the component.""" assert await async_setup_component(hass, 
DOMAIN, {}) is True @@ -162,7 +164,7 @@ def mock_api(account, spa): @pytest.fixture -async def setup_entry(hass, config_entry): +async def setup_entry(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Initialize the config entry.""" config_entry.add_to_hass(hass) await hass.config_entries.async_setup(config_entry.entry_id) diff --git a/tests/components/smarttub/test_config_flow.py b/tests/components/smarttub/test_config_flow.py index c625f217405..5832841641c 100644 --- a/tests/components/smarttub/test_config_flow.py +++ b/tests/components/smarttub/test_config_flow.py @@ -66,15 +66,7 @@ async def test_reauth_success(hass: HomeAssistant, smarttub_api, account) -> Non ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -107,15 +99,7 @@ async def test_reauth_wrong_account(hass: HomeAssistant, smarttub_api, account) # we try to reauth account #2, and the user successfully authenticates to account #1 account.id = mock_entry1.unique_id - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry2.unique_id, - "entry_id": mock_entry2.entry_id, - }, - data=mock_entry2.data, - ) + result = await mock_entry2.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/smhi/snapshots/test_weather.ambr b/tests/components/smhi/snapshots/test_weather.ambr index d825e22d470..9ab0375df83 100644 --- a/tests/components/smhi/snapshots/test_weather.ambr +++ b/tests/components/smhi/snapshots/test_weather.ambr @@ -80,142 +80,6 @@ 'wind_speed_unit': , }) # --- -# name: test_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-07T12:00:00', - 'humidity': 96, - 'precipitation': 0.0, - 'pressure': 991.0, - 'temperature': 18.0, - 'templow': 15.0, - 'wind_bearing': 114, - 'wind_gust_speed': 32.76, - 'wind_speed': 10.08, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'rainy', - 'datetime': '2023-08-08T12:00:00', - 'humidity': 97, - 'precipitation': 10.6, - 'pressure': 984.0, - 'temperature': 15.0, - 'templow': 11.0, - 'wind_bearing': 183, - 'wind_gust_speed': 27.36, - 'wind_speed': 11.16, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'rainy', - 'datetime': '2023-08-09T12:00:00', - 'humidity': 95, - 'precipitation': 6.3, - 'pressure': 1001.0, - 'temperature': 12.0, - 'templow': 11.0, - 'wind_bearing': 166, - 'wind_gust_speed': 48.24, - 'wind_speed': 18.0, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-10T12:00:00', - 'humidity': 75, - 'precipitation': 4.8, - 'pressure': 1011.0, - 'temperature': 14.0, - 'templow': 10.0, - 'wind_bearing': 174, - 'wind_gust_speed': 29.16, - 'wind_speed': 11.16, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': '2023-08-11T12:00:00', - 'humidity': 69, - 'precipitation': 0.6, - 'pressure': 1015.0, - 'temperature': 18.0, - 'templow': 12.0, - 'wind_bearing': 197, - 'wind_gust_speed': 27.36, - 'wind_speed': 10.08, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'cloudy', - 'datetime': 
'2023-08-12T12:00:00', - 'humidity': 82, - 'precipitation': 0.0, - 'pressure': 1014.0, - 'temperature': 17.0, - 'templow': 12.0, - 'wind_bearing': 225, - 'wind_gust_speed': 28.08, - 'wind_speed': 8.64, - }), - dict({ - 'cloud_coverage': 75, - 'condition': 'partlycloudy', - 'datetime': '2023-08-13T12:00:00', - 'humidity': 59, - 'precipitation': 0.0, - 'pressure': 1013.0, - 'temperature': 20.0, - 'templow': 14.0, - 'wind_bearing': 234, - 'wind_gust_speed': 35.64, - 'wind_speed': 14.76, - }), - dict({ - 'cloud_coverage': 100, - 'condition': 'partlycloudy', - 'datetime': '2023-08-14T12:00:00', - 'humidity': 56, - 'precipitation': 0.0, - 'pressure': 1015.0, - 'temperature': 21.0, - 'templow': 14.0, - 'wind_bearing': 216, - 'wind_gust_speed': 33.12, - 'wind_speed': 13.68, - }), - dict({ - 'cloud_coverage': 88, - 'condition': 'partlycloudy', - 'datetime': '2023-08-15T12:00:00', - 'humidity': 64, - 'precipitation': 3.6, - 'pressure': 1014.0, - 'temperature': 20.0, - 'templow': 14.0, - 'wind_bearing': 226, - 'wind_gust_speed': 33.12, - 'wind_speed': 13.68, - }), - dict({ - 'cloud_coverage': 75, - 'condition': 'partlycloudy', - 'datetime': '2023-08-16T12:00:00', - 'humidity': 61, - 'precipitation': 2.4, - 'pressure': 1014.0, - 'temperature': 20.0, - 'templow': 14.0, - 'wind_bearing': 233, - 'wind_gust_speed': 33.48, - 'wind_speed': 14.04, - }), - ]), - }) -# --- # name: test_forecast_service[get_forecasts] dict({ 'weather.smhi_test': dict({ diff --git a/tests/components/smlight/__init__.py b/tests/components/smlight/__init__.py new file mode 100644 index 00000000000..37184226507 --- /dev/null +++ b/tests/components/smlight/__init__.py @@ -0,0 +1 @@ +"""Tests for the SMLIGHT Zigbee adapter integration.""" diff --git a/tests/components/smlight/conftest.py b/tests/components/smlight/conftest.py new file mode 100644 index 00000000000..0338bf4b672 --- /dev/null +++ b/tests/components/smlight/conftest.py @@ -0,0 +1,74 @@ +"""Common fixtures for the SMLIGHT Zigbee tests.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, MagicMock, patch + +from pysmlight.web import Info, Sensors +import pytest + +from homeassistant.components.smlight.const import DOMAIN +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry, load_json_object_fixture + +MOCK_HOST = "slzb-06.local" +MOCK_USERNAME = "test-user" +MOCK_PASSWORD = "test-pass" + + +@pytest.fixture +def mock_config_entry() -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data={ + CONF_HOST: MOCK_HOST, + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + unique_id="aa:bb:cc:dd:ee:ff", + ) + + +@pytest.fixture +def mock_setup_entry() -> Generator[AsyncMock, None, None]: + """Override async_setup_entry.""" + with patch( + "homeassistant.components.smlight.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.fixture +def mock_smlight_client(request: pytest.FixtureRequest) -> Generator[MagicMock]: + """Mock the SMLIGHT API client.""" + with ( + patch( + "homeassistant.components.smlight.coordinator.Api2", autospec=True + ) as smlight_mock, + patch("homeassistant.components.smlight.config_flow.Api2", new=smlight_mock), + ): + api = smlight_mock.return_value + api.host = MOCK_HOST + api.get_info.return_value = Info.from_dict( + load_json_object_fixture("info.json", DOMAIN) + ) + 
api.get_sensors.return_value = Sensors.from_dict( + load_json_object_fixture("sensors.json", DOMAIN) + ) + + api.check_auth_needed.return_value = False + api.authenticate.return_value = True + + yield api + + +async def setup_integration(hass: HomeAssistant, mock_config_entry: MockConfigEntry): + """Set up the integration.""" + mock_config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + return mock_config_entry diff --git a/tests/components/smlight/fixtures/info.json b/tests/components/smlight/fixtures/info.json new file mode 100644 index 00000000000..72bb7c1ed9b --- /dev/null +++ b/tests/components/smlight/fixtures/info.json @@ -0,0 +1,16 @@ +{ + "coord_mode": 0, + "device_ip": "192.168.1.161", + "fs_total": 3456, + "fw_channel": "dev", + "MAC": "AA:BB:CC:DD:EE:FF", + "model": "SLZB-06p7", + "ram_total": 296, + "sw_version": "v2.3.1.dev", + "wifi_mode": 0, + "zb_flash_size": 704, + "zb_hw": "CC2652P7", + "zb_ram_size": 152, + "zb_version": -1, + "zb_type": -1 +} diff --git a/tests/components/smlight/fixtures/sensors.json b/tests/components/smlight/fixtures/sensors.json new file mode 100644 index 00000000000..0b2f9055e01 --- /dev/null +++ b/tests/components/smlight/fixtures/sensors.json @@ -0,0 +1,14 @@ +{ + "esp32_temp": 35.0, + "zb_temp": 32.7, + "uptime": 508125, + "socket_uptime": 127, + "ram_usage": 99, + "fs_used": 188, + "ethernet": true, + "wifi_connected": false, + "wifi_status": 255, + "disable_leds": false, + "night_mode": false, + "auto_zigbee": false +} diff --git a/tests/components/smlight/snapshots/test_init.ambr b/tests/components/smlight/snapshots/test_init.ambr new file mode 100644 index 00000000000..528a7b7b340 --- /dev/null +++ b/tests/components/smlight/snapshots/test_init.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_device_info + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 'model_id': None, + 'name': 'Mock Title', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/smlight/snapshots/test_sensor.ambr b/tests/components/smlight/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..0ff3d37b735 --- /dev/null +++ b/tests/components/smlight/snapshots/test_sensor.ambr @@ -0,0 +1,741 @@ +# serializer version: 1 +# name: test_sensors[sensor.mock_title_core_chip_temp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_core_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 
'translation_key': 'core_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_core_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.mock_title_core_chip_temp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Core chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_core_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.0', + }) +# --- +# name: test_sensors[sensor.mock_title_filesystem_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_filesystem_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filesystem usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fs_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_fs_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.mock_title_filesystem_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Mock Title Filesystem usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_filesystem_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '188', + }) +# --- +# name: test_sensors[sensor.mock_title_ram_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_ram_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'RAM usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ram_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_ram_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.mock_title_ram_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'Mock Title RAM usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_ram_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '99', + }) +# --- +# name: test_sensors[sensor.mock_title_zigbee_chip_temp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_title_zigbee_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Zigbee chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'zigbee_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.mock_title_zigbee_chip_temp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Mock Title Zigbee chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_title_zigbee_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.7', + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'core_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_core_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'slzb-06 Core chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.0', + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'slzb-06 Core chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '35.0', + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_core_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Core chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'core_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_core_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_core_chip_temp].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 
'model_id': None, + 'name': 'slzb-06', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filesystem usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fs_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_fs_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'slzb-06 Filesystem usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '188', + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'slzb-06 Filesystem usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '188', + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_filesystem_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Filesystem usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'fs_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_fs_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_filesystem_usage].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 'model_id': None, + 'name': 'slzb-06', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_ram_usage', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'RAM usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ram_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_ram_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'slzb-06 RAM usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_ram_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '99', + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_size', + 'friendly_name': 'slzb-06 RAM usage', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_ram_usage', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '99', + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_ram_usage', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'RAM usage', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'ram_usage', + 'unique_id': 'aa:bb:cc:dd:ee:ff_ram_usage', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_ram_usage].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 'model_id': None, + 'name': 'slzb-06', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Zigbee chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'zigbee_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 
'device_class': 'temperature', + 'friendly_name': 'slzb-06 Zigbee chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.7', + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'slzb-06 Zigbee chip temp', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '32.7', + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp].1 + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.slzb_06_zigbee_chip_temp', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Zigbee chip temp', + 'platform': 'smlight', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'zigbee_temperature', + 'unique_id': 'aa:bb:cc:dd:ee:ff_zigbee_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_sensors[sensor.slzb_06_zigbee_chip_temp].2 + DeviceRegistryEntrySnapshot({ + 'area_id': None, + 'config_entries': , + 'configuration_url': 'http://slzb-06.local', + 'connections': set({ + tuple( + 'mac', + 'aa:bb:cc:dd:ee:ff', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'SMLIGHT', + 'model': 'SLZB-06p7', + 'model_id': None, + 'name': 'slzb-06', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': None, + 'sw_version': 'core: v2.3.1.dev / zigbee: -1', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/smlight/test_config_flow.py b/tests/components/smlight/test_config_flow.py new file mode 100644 index 00000000000..9a23a8de753 --- /dev/null +++ b/tests/components/smlight/test_config_flow.py @@ -0,0 +1,365 @@ +"""Test the SMLIGHT SLZB config flow.""" + +from ipaddress import ip_address +from unittest.mock import AsyncMock, MagicMock + +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError +import pytest + +from homeassistant.components import zeroconf +from homeassistant.components.smlight.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType + +from .conftest import MOCK_HOST, MOCK_PASSWORD, MOCK_USERNAME + +from tests.common import MockConfigEntry + +DISCOVERY_INFO = zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + ip_addresses=[ip_address("127.0.0.1")], + hostname="slzb-06.local.", + name="mock_name", + port=6638, + properties={"mac": "AA:BB:CC:DD:EE:FF"}, + type="mock_type", +) + +DISCOVERY_INFO_LEGACY = zeroconf.ZeroconfServiceInfo( + ip_address=ip_address("127.0.0.1"), + 
ip_addresses=[ip_address("127.0.0.1")], + hostname="slzb-06.local.", + name="mock_name", + port=6638, + properties={}, + type="mock_type", +) + + +@pytest.mark.usefixtures("mock_smlight_client") +async def test_user_flow(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: + """Test the full manual user flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCK_HOST, + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "SLZB-06p7" + assert result2["data"] == { + CONF_HOST: MOCK_HOST, + } + assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_zeroconf_flow( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the zeroconf flow.""" + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO + ) + + assert result["description_placeholders"] == {"host": MOCK_HOST} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_discovery" + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 1 + assert progress[0]["flow_id"] == result["flow_id"] + assert progress[0]["context"]["confirm_only"] is True + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["context"]["source"] == "zeroconf" + assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" + assert result2["title"] == "SLZB-06p7" + assert result2["data"] == { + CONF_HOST: MOCK_HOST, + } + + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 1 + + +async def test_zeroconf_flow_auth( + hass: HomeAssistant, + mock_smlight_client: MagicMock, + mock_setup_entry: AsyncMock, +) -> None: + """Test the full zeroconf flow including authentication.""" + mock_smlight_client.check_auth_needed.return_value = True + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_ZEROCONF}, data=DISCOVERY_INFO + ) + + assert result["description_placeholders"] == {"host": MOCK_HOST} + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_discovery" + + progress = hass.config_entries.flow.async_progress() + assert len(progress) == 1 + assert progress[0]["flow_id"] == result["flow_id"] + assert progress[0]["context"]["confirm_only"] is True + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["step_id"] == "auth" + + progress2 = hass.config_entries.flow.async_progress() + assert len(progress2) == 1 + assert progress2[0]["flow_id"] == result["flow_id"] + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + user_input={ + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["context"]["source"] == "zeroconf" + assert result3["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" + assert result3["title"] == "SLZB-06p7" + assert result3["data"] == { 
+ CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + CONF_HOST: MOCK_HOST, + } + + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 1 + + +@pytest.mark.usefixtures("mock_smlight_client") +async def test_user_device_exists_abort( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test we abort user flow if device already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: MOCK_HOST, + }, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +@pytest.mark.usefixtures("mock_smlight_client") +async def test_zeroconf_device_exists_abort( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test we abort zeroconf flow if device already configured.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=DISCOVERY_INFO, + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" + + +async def test_user_invalid_auth( + hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle invalid auth.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_smlight_client.authenticate.side_effect = SmlightAuthError + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={ + CONF_HOST: MOCK_HOST, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test", + CONF_PASSWORD: "bad", + }, + ) + + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "invalid_auth"} + assert result2["step_id"] == "auth" + + mock_smlight_client.authenticate.side_effect = None + + result3 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: "test", + CONF_PASSWORD: "good", + }, + ) + + assert result3["type"] is FlowResultType.CREATE_ENTRY + assert result3["title"] == "SLZB-06p7" + assert result3["data"] == { + CONF_HOST: MOCK_HOST, + CONF_USERNAME: "test", + CONF_PASSWORD: "good", + } + + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 1 + + +async def test_user_cannot_connect( + hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock +) -> None: + """Test we handle user cannot connect error.""" + mock_smlight_client.check_auth_needed.side_effect = SmlightConnectionError + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: "unknown.local", + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": "cannot_connect"} + assert result["step_id"] == "user" + + mock_smlight_client.check_auth_needed.side_effect = None + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCK_HOST, + }, + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["title"] == "SLZB-06p7" + + assert len(mock_setup_entry.mock_calls) == 1 + assert 
len(mock_smlight_client.get_info.mock_calls) == 1 + + +async def test_auth_cannot_connect( + hass: HomeAssistant, mock_smlight_client: MagicMock +) -> None: + """Test we abort auth step on cannot connect error.""" + mock_smlight_client.check_auth_needed.return_value = True + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_HOST: MOCK_HOST, + }, + ) + + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "auth" + + mock_smlight_client.check_auth_needed.side_effect = SmlightConnectionError + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + { + CONF_USERNAME: MOCK_USERNAME, + CONF_PASSWORD: MOCK_PASSWORD, + }, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "cannot_connect" + + +async def test_zeroconf_cannot_connect( + hass: HomeAssistant, mock_smlight_client: MagicMock +) -> None: + """Test we abort flow on zeroconf cannot connect error.""" + mock_smlight_client.check_auth_needed.side_effect = SmlightConnectionError + + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=DISCOVERY_INFO, + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "confirm_discovery" + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], + {}, + ) + + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "cannot_connect" + + +@pytest.mark.usefixtures("mock_smlight_client") +async def test_zeroconf_legacy_mac( + hass: HomeAssistant, mock_smlight_client: MagicMock, mock_setup_entry: AsyncMock +) -> None: + """Test we can get unique id MAC address for older firmwares.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_ZEROCONF}, + data=DISCOVERY_INFO_LEGACY, + ) + + assert result["description_placeholders"] == {"host": MOCK_HOST} + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={} + ) + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["context"]["source"] == "zeroconf" + assert result2["context"]["unique_id"] == "aa:bb:cc:dd:ee:ff" + assert result2["title"] == "SLZB-06p7" + assert result2["data"] == { + CONF_HOST: MOCK_HOST, + } + + assert len(mock_setup_entry.mock_calls) == 1 + assert len(mock_smlight_client.get_info.mock_calls) == 2 diff --git a/tests/components/smlight/test_init.py b/tests/components/smlight/test_init.py new file mode 100644 index 00000000000..682993cb943 --- /dev/null +++ b/tests/components/smlight/test_init.py @@ -0,0 +1,94 @@ +"Test SMLIGHT SLZB device integration initialization." 
+ +from unittest.mock import MagicMock + +from freezegun.api import FrozenDateTimeFactory +from pysmlight.exceptions import SmlightAuthError, SmlightConnectionError +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.components.smlight.const import SCAN_INTERVAL +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import STATE_UNAVAILABLE +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr + +from .conftest import setup_integration + +from tests.common import MockConfigEntry, async_fire_time_changed + +pytestmark = [ + pytest.mark.usefixtures( + "mock_smlight_client", + ) +] + + +async def test_async_setup_entry( + hass: HomeAssistant, mock_config_entry: MockConfigEntry +) -> None: + """Test async_setup_entry.""" + entry = await setup_integration(hass, mock_config_entry) + + assert entry.state is ConfigEntryState.LOADED + assert entry.unique_id == "aa:bb:cc:dd:ee:ff" + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_async_setup_auth_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, +) -> None: + """Test async_setup_entry when authentication fails.""" + mock_smlight_client.check_auth_needed.return_value = True + mock_smlight_client.authenticate.side_effect = SmlightAuthError + entry = await setup_integration(hass, mock_config_entry) + + assert entry.state is ConfigEntryState.SETUP_ERROR + + assert await hass.config_entries.async_unload(entry.entry_id) + await hass.async_block_till_done() + assert entry.state is ConfigEntryState.NOT_LOADED + + +async def test_update_failed( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + mock_smlight_client: MagicMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test update failed due to connection error.""" + + await setup_integration(hass, mock_config_entry) + entity = hass.states.get("sensor.mock_title_core_chip_temp") + assert entity.state is not STATE_UNAVAILABLE + + mock_smlight_client.get_info.side_effect = SmlightConnectionError + + freezer.tick(SCAN_INTERVAL) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + entity = hass.states.get("sensor.mock_title_core_chip_temp") + assert entity is not None + assert entity.state == STATE_UNAVAILABLE + + +async def test_device_info( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + device_registry: dr.DeviceRegistry, +) -> None: + """Test device registry information.""" + entry = await setup_integration(hass, mock_config_entry) + + device_entry = device_registry.async_get_device( + connections={(dr.CONNECTION_NETWORK_MAC, entry.unique_id)} + ) + assert device_entry is not None + assert device_entry == snapshot diff --git a/tests/components/smlight/test_sensor.py b/tests/components/smlight/test_sensor.py new file mode 100644 index 00000000000..4d16a73a0a7 --- /dev/null +++ b/tests/components/smlight/test_sensor.py @@ -0,0 +1,54 @@ +"""Tests for the SMLIGHT sensor platform.""" + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr, entity_registry as er + +from .conftest import setup_integration + +from tests.common import MockConfigEntry, snapshot_platform + +pytestmark = [ + 
pytest.mark.usefixtures( + "mock_smlight_client", + ) +] + + +@pytest.fixture +def platforms() -> Platform | list[Platform]: + """Platforms, which should be loaded during the test.""" + return Platform.SENSOR + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_sensors( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, + snapshot: SnapshotAssertion, +) -> None: + """Test the SMLIGHT sensors.""" + entry = await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id) + + +async def test_disabled_by_default_sensors( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + mock_config_entry: MockConfigEntry, +) -> None: + """Test the disabled by default SMLIGHT sensors.""" + await setup_integration(hass, mock_config_entry) + + for sensor in ("ram_usage", "filesystem_usage"): + assert not hass.states.get(f"sensor.mock_title_{sensor}") + + assert (entry := entity_registry.async_get(f"sensor.mock_title_{sensor}")) + assert entry.disabled + assert entry.disabled_by is er.RegistryEntryDisabler.INTEGRATION diff --git a/tests/components/snmp/test_integer_sensor.py b/tests/components/snmp/test_integer_sensor.py index dab2b080c97..8e7e0f166ef 100644 --- a/tests/components/snmp/test_integer_sensor.py +++ b/tests/components/snmp/test_integer_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from pysnmp.hlapi import Integer32 +from pysnmp.proto.rfc1902 import Integer32 import pytest from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN diff --git a/tests/components/snmp/test_negative_sensor.py b/tests/components/snmp/test_negative_sensor.py index dba09ea75bd..66a111b68d0 100644 --- a/tests/components/snmp/test_negative_sensor.py +++ b/tests/components/snmp/test_negative_sensor.py @@ -2,7 +2,7 @@ from unittest.mock import patch -from pysnmp.hlapi import Integer32 +from pysnmp.proto.rfc1902 import Integer32 import pytest from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN diff --git a/tests/components/snmp/test_switch.py b/tests/components/snmp/test_switch.py new file mode 100644 index 00000000000..fe1c3922ff0 --- /dev/null +++ b/tests/components/snmp/test_switch.py @@ -0,0 +1,67 @@ +"""SNMP switch tests.""" + +from unittest.mock import patch + +from pysnmp.proto.rfc1902 import Integer32 +import pytest + +from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN +from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +config = { + SWITCH_DOMAIN: { + "platform": "snmp", + "host": "192.168.1.32", + # ippower-mib::ippoweroutlet1.0 + "baseoid": "1.3.6.1.4.1.38107.1.3.1.0", + "payload_on": 1, + "payload_off": 0, + }, +} + + +async def test_snmp_integer_switch_off(hass: HomeAssistant) -> None: + """Test snmp switch returning int 0 for off.""" + + mock_data = Integer32(0) + with patch( + "homeassistant.components.snmp.switch.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + assert await async_setup_component(hass, SWITCH_DOMAIN, config) + await hass.async_block_till_done() + state = hass.states.get("switch.snmp") + assert state.state == STATE_OFF + + +async def test_snmp_integer_switch_on(hass: HomeAssistant) -> None: + """Test snmp switch returning int 1 for on.""" + + mock_data = Integer32(1) + with patch( + 
"homeassistant.components.snmp.switch.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + assert await async_setup_component(hass, SWITCH_DOMAIN, config) + await hass.async_block_till_done() + state = hass.states.get("switch.snmp") + assert state.state == STATE_ON + + +async def test_snmp_integer_switch_unknown( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test snmp switch returning int 3 (not a configured payload) for unknown.""" + + mock_data = Integer32(3) + with patch( + "homeassistant.components.snmp.switch.getCmd", + return_value=(None, None, None, [[mock_data]]), + ): + assert await async_setup_component(hass, SWITCH_DOMAIN, config) + await hass.async_block_till_done() + state = hass.states.get("switch.snmp") + assert state.state == STATE_UNKNOWN + assert "Invalid payload '3' received for entity" in caplog.text diff --git a/tests/components/solarlog/snapshots/test_sensor.ambr b/tests/components/solarlog/snapshots/test_sensor.ambr index 5fb369bc3b6..df154a5eb9b 100644 --- a/tests/components/solarlog/snapshots/test_sensor.ambr +++ b/tests/components/solarlog/snapshots/test_sensor.ambr @@ -745,1097 +745,6 @@ 'state': '545', }) # --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_alternator_loss-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_alternator_loss', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Alternator loss', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'alternator_loss', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_alternator_loss', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_alternator_loss-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Alternator loss', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_alternator_loss', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_capacity-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_capacity', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Capacity', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'capacity', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_capacity', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_capacity-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'solarlog_test_1_2_3 Capacity', - 'state_class': , - 
'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_capacity', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '85.0', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_ac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_ac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption AC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_ac', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_ac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Consumption AC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_ac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '54.87', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption day', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_day', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption day', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.005', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption month', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_month', - 
'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption month', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.758', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption total', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '354.687', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption year', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_year', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption year', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.587', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_yesterday-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_yesterday', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': , - 'original_icon': None, - 'original_name': 'Consumption yesterday', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'consumption_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_consumption_yesterday', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_consumption_yesterday-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Consumption yesterday', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_consumption_yesterday', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.007', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_efficiency-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_efficiency', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Efficiency', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'efficiency', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_efficiency', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_efficiency-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'solarlog_test_1_2_3 Efficiency', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_efficiency', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '98.0', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_installed_peak_power-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_installed_peak_power', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Installed peak power', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'total_power', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_total_power', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_installed_peak_power-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Installed peak power', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_installed_peak_power', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '120', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_last_update-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 
'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_last_update', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Last update', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'last_update', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_last_updated', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_last_update-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'timestamp', - 'friendly_name': 'solarlog_test_1_2_3 Last update', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_last_update', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_ac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_power_ac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power AC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_ac', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_ac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Power AC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_power_ac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_available-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_power_available', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power available', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_available', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_available', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_available-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Power available', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_power_available', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '45.13', - }) -# --- -# name: 
test_all_entities[sensor.solarlog_test_1_2_3_power_dc-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_power_dc', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Power DC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'power_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_power_dc', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_power_dc-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power', - 'friendly_name': 'solarlog_test_1_2_3 Power DC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_power_dc', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '102', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_usage-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_usage', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Usage', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'usage', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_usage', - 'unit_of_measurement': '%', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_usage-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'power_factor', - 'friendly_name': 'solarlog_test_1_2_3 Usage', - 'state_class': , - 'unit_of_measurement': '%', - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_usage', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '54.9', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_ac-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_ac', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage AC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voltage_ac', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_ac', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_ac-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'solarlog_test_1_2_3 Voltage AC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': 
, - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_ac', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_dc-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_dc', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Voltage DC', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'voltage_dc', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_voltage_dc', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_voltage_dc-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'voltage', - 'friendly_name': 'solarlog_test_1_2_3 Voltage DC', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_voltage_dc', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '100', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_day-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_day', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield day', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_day', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_day', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_day-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield day', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_day', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.004', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_month-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_month', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield month', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_month', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_month', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_month-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 
'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield month', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_month', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.515', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_total-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_total', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield total', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_total', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_total', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_total-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield total', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_total', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '56.513', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_year-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_year', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield year', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_year', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_year', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_year-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield year', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_year', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1.023', - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_yesterday-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_yesterday', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Yield yesterday', - 'platform': 'solarlog', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'yield_yesterday', - 'unique_id': 'ce5f5431554d101905d31797e1232da8_yield_yesterday', - 
'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.solarlog_test_1_2_3_yield_yesterday-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'energy', - 'friendly_name': 'solarlog_test_1_2_3 Yield yesterday', - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.solarlog_test_1_2_3_yield_yesterday', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.005', - }) -# --- # name: test_all_entities[sensor.solarlog_usage-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/solarlog/test_config_flow.py b/tests/components/solarlog/test_config_flow.py index 34da13cdf8f..b06f2ac0587 100644 --- a/tests/components/solarlog/test_config_flow.py +++ b/tests/components/solarlog/test_config_flow.py @@ -7,7 +7,7 @@ from solarlog_cli.solarlog_exceptions import SolarLogConnectionError, SolarLogEr from homeassistant import config_entries from homeassistant.components.solarlog import config_flow -from homeassistant.components.solarlog.const import DEFAULT_HOST, DOMAIN +from homeassistant.components.solarlog.const import DOMAIN from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -45,7 +45,7 @@ async def test_form(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> None: assert len(mock_setup_entry.mock_calls) == 1 -def init_config_flow(hass): +def init_config_flow(hass: HomeAssistant) -> config_flow.SolarLogConfigFlow: """Init a configuration flow.""" flow = config_flow.SolarLogConfigFlow() flow.hass = hass @@ -123,35 +123,6 @@ async def test_form_exceptions( assert result["data"]["extended_data"] is False -async def test_import(hass: HomeAssistant, test_connect) -> None: - """Test import step.""" - flow = init_config_flow(hass) - - # import with only host - result = await flow.async_step_import({CONF_HOST: HOST}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog" - assert result["data"][CONF_HOST] == HOST - - # import with only name - result = await flow.async_step_import({CONF_NAME: NAME}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_HOST] == DEFAULT_HOST - - # import with host and name - result = await flow.async_step_import({CONF_HOST: HOST, CONF_NAME: NAME}) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "solarlog_test_1_2_3" - assert result["data"][CONF_HOST] == HOST - - async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None: """Test we abort if the device is already setup.""" flow = init_config_flow(hass) @@ -160,11 +131,11 @@ async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None ).add_to_hass(hass) # Should fail, same HOST different NAME (default) - result = await flow.async_step_import( + result = await flow.async_step_user( {CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == "already_configured" + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {CONF_HOST: "already_configured"} # Should fail, same HOST and NAME result = await flow.async_step_user({CONF_HOST: HOST, CONF_NAME: NAME}) @@ -172,7 +143,7 @@ async def 
test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None assert result["errors"] == {CONF_HOST: "already_configured"} # SHOULD pass, diff HOST (without http://), different NAME - result = await flow.async_step_import( + result = await flow.async_step_user( {CONF_HOST: "2.2.2.2", CONF_NAME: "solarlog_test_7_8_9", "extended_data": False} ) assert result["type"] is FlowResultType.CREATE_ENTRY @@ -180,7 +151,7 @@ async def test_abort_if_already_setup(hass: HomeAssistant, test_connect) -> None assert result["data"][CONF_HOST] == "http://2.2.2.2" # SHOULD pass, diff HOST, same NAME - result = await flow.async_step_import( + result = await flow.async_step_user( {CONF_HOST: "http://2.2.2.2", CONF_NAME: NAME, "extended_data": False} ) await hass.async_block_till_done() diff --git a/tests/components/soma/test_config_flow.py b/tests/components/soma/test_config_flow.py index 8b8548bfe3e..67109e37c6d 100644 --- a/tests/components/soma/test_config_flow.py +++ b/tests/components/soma/test_config_flow.py @@ -5,7 +5,8 @@ from unittest.mock import patch from api.soma_api import SomaApi from requests import RequestException -from homeassistant.components.soma import DOMAIN, config_flow +from homeassistant.components.soma import DOMAIN +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -17,57 +18,66 @@ MOCK_PORT = 3000 async def test_form(hass: HomeAssistant) -> None: """Test user form showing.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass - result = await flow.async_step_user() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) assert result["type"] is FlowResultType.FORM async def test_import_abort(hass: HomeAssistant) -> None: """Test configuration from YAML aborting with existing entity.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass MockConfigEntry(domain=DOMAIN).add_to_hass(hass) - result = await flow.async_step_import() + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_IMPORT} + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_setup" async def test_import_create(hass: HomeAssistant) -> None: """Test configuration from YAML.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "success"}): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY async def test_error_status(hass: HomeAssistant) -> None: """Test Connect successfully returning error status.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "error"}): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "result_error" async def test_key_error(hass: HomeAssistant) -> None: """Test Connect returning empty string.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass + with patch.object(SomaApi, "list_devices", return_value={}): - 
result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "connection_error" async def test_exception(hass: HomeAssistant) -> None: """Test if RequestException fires when no connection can be made.""" - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", side_effect=RequestException()): - result = await flow.async_step_import({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_IMPORT}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "connection_error" @@ -75,8 +85,10 @@ async def test_exception(hass: HomeAssistant) -> None: async def test_full_flow(hass: HomeAssistant) -> None: """Check classic use case.""" hass.data[DOMAIN] = {} - flow = config_flow.SomaFlowHandler() - flow.hass = hass with patch.object(SomaApi, "list_devices", return_value={"result": "success"}): - result = await flow.async_step_user({"host": MOCK_HOST, "port": MOCK_PORT}) + result = await hass.config_entries.flow.async_init( + DOMAIN, + context={"source": SOURCE_USER}, + data={"host": MOCK_HOST, "port": MOCK_PORT}, + ) assert result["type"] is FlowResultType.CREATE_ENTRY diff --git a/tests/components/sonarr/test_config_flow.py b/tests/components/sonarr/test_config_flow.py index 6bd14e8b581..118d5020cba 100644 --- a/tests/components/sonarr/test_config_flow.py +++ b/tests/components/sonarr/test_config_flow.py @@ -11,7 +11,7 @@ from homeassistant.components.sonarr.const import ( DEFAULT_WANTED_MAX_ITEMS, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -96,15 +96,7 @@ async def test_full_reauth_flow_implementation( """Test the manual reauth flow from start to finish.""" entry = init_integration - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/songpal/test_config_flow.py b/tests/components/songpal/test_config_flow.py index 8f503360702..5215e9b3c0e 100644 --- a/tests/components/songpal/test_config_flow.py +++ b/tests/components/songpal/test_config_flow.py @@ -6,7 +6,12 @@ from unittest.mock import patch from homeassistant.components import ssdp from homeassistant.components.songpal.const import CONF_ENDPOINT, DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_SSDP, SOURCE_USER +from homeassistant.config_entries import ( + SOURCE_IMPORT, + SOURCE_SSDP, + SOURCE_USER, + ConfigFlowResult, +) from homeassistant.const import CONF_HOST, CONF_NAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -42,7 +47,7 @@ SSDP_DATA = ssdp.SsdpServiceInfo( ) -def _flow_next(hass, flow_id): +def _flow_next(hass: HomeAssistant, flow_id: str) -> 
ConfigFlowResult: return next( flow for flow in hass.config_entries.flow.async_progress() @@ -143,7 +148,7 @@ async def test_flow_import_without_name(hass: HomeAssistant) -> None: mocked_device.get_interface_information.assert_called_once() -def _create_mock_config_entry(hass): +def _create_mock_config_entry(hass: HomeAssistant) -> MockConfigEntry: MockConfigEntry( domain=DOMAIN, unique_id="uuid:0000", diff --git a/tests/components/songpal/test_media_player.py b/tests/components/songpal/test_media_player.py index 8f56170b839..2baea6cb5c9 100644 --- a/tests/components/songpal/test_media_player.py +++ b/tests/components/songpal/test_media_player.py @@ -2,6 +2,7 @@ from datetime import timedelta import logging +from typing import Any from unittest.mock import AsyncMock, MagicMock, call, patch import pytest @@ -54,12 +55,12 @@ SUPPORT_SONGPAL = ( ) -def _get_attributes(hass): +def _get_attributes(hass: HomeAssistant) -> dict[str, Any]: state = hass.states.get(ENTITY_ID) return state.as_dict()["attributes"] -async def _call(hass, service, **argv): +async def _call(hass: HomeAssistant, service: str, **argv: Any) -> None: await hass.services.async_call( media_player.DOMAIN, service, diff --git a/tests/components/sonos/conftest.py b/tests/components/sonos/conftest.py index bbec7a2308c..04b35e2c021 100644 --- a/tests/components/sonos/conftest.py +++ b/tests/components/sonos/conftest.py @@ -1,15 +1,21 @@ """Configuration for Sonos tests.""" import asyncio -from collections.abc import Callable, Generator +from collections.abc import Callable, Coroutine, Generator from copy import copy from ipaddress import ip_address +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest from soco import SoCo from soco.alarms import Alarms -from soco.data_structures import DidlFavorite, SearchResult +from soco.data_structures import ( + DidlFavorite, + DidlMusicTrack, + DidlPlaylistContainer, + SearchResult, +) from soco.events_base import Event as SonosEvent from homeassistant.components import ssdp, zeroconf @@ -79,7 +85,7 @@ class SonosMockService: class SonosMockEvent: """Mock a sonos Event used in callbacks.""" - def __init__(self, soco, service, variables): + def __init__(self, soco, service, variables) -> None: """Initialize the instance.""" self.sid = f"{soco.uid}_sub0000000001" self.seq = "0" @@ -120,7 +126,9 @@ async def async_autosetup_sonos(async_setup_sonos): @pytest.fixture -def async_setup_sonos(hass, config_entry, fire_zgs_event): +def async_setup_sonos( + hass: HomeAssistant, config_entry: MockConfigEntry, fire_zgs_event +) -> Callable[[], Coroutine[Any, Any, None]]: """Return a coroutine to set up a Sonos integration instance on demand.""" async def _wrapper(): @@ -136,7 +144,7 @@ def async_setup_sonos(hass, config_entry, fire_zgs_event): @pytest.fixture(name="config_entry") -def config_entry_fixture(): +def config_entry_fixture() -> MockConfigEntry: """Create a mock Sonos config entry.""" return MockConfigEntry(domain=DOMAIN, title="Sonos") @@ -181,6 +189,8 @@ class SoCoMockFactory: current_track_info_empty, battery_info, alarm_clock, + sonos_playlists: SearchResult, + sonos_queue: list[DidlMusicTrack], ) -> None: """Initialize the mock factory.""" self.mock_list: dict[str, MockSoCo] = {} @@ -189,6 +199,8 @@ class SoCoMockFactory: self.current_track_info = current_track_info_empty self.battery_info = battery_info self.alarm_clock = alarm_clock + self.sonos_playlists = sonos_playlists + self.sonos_queue = sonos_queue def cache_mock( self, mock_soco: 
MockSoCo, ip_address: str, name: str = "Zone A" @@ -201,6 +213,8 @@ class SoCoMockFactory: mock_soco.music_library = self.music_library mock_soco.get_current_track_info.return_value = self.current_track_info mock_soco.music_source_from_uri = SoCo.music_source_from_uri + mock_soco.get_sonos_playlists.return_value = self.sonos_playlists + mock_soco.get_queue.return_value = self.sonos_queue my_speaker_info = self.speaker_info.copy() my_speaker_info["zone_name"] = name my_speaker_info["uid"] = mock_soco.uid @@ -249,13 +263,39 @@ def soco_sharelink(): yield mock_instance +@pytest.fixture(name="sonos_websocket") +def sonos_websocket(): + """Fixture to mock SonosWebSocket.""" + with patch( + "homeassistant.components.sonos.speaker.SonosWebsocket" + ) as mock_sonos_ws: + mock_instance = AsyncMock() + mock_instance.play_clip = AsyncMock() + mock_instance.play_clip.return_value = [{"success": 1}, {}] + mock_sonos_ws.return_value = mock_instance + yield mock_instance + + @pytest.fixture(name="soco_factory") def soco_factory( - music_library, speaker_info, current_track_info_empty, battery_info, alarm_clock + music_library, + speaker_info, + current_track_info_empty, + battery_info, + alarm_clock, + sonos_playlists: SearchResult, + sonos_websocket, + sonos_queue: list[DidlMusicTrack], ): """Create factory for instantiating SoCo mocks.""" factory = SoCoMockFactory( - music_library, speaker_info, current_track_info_empty, battery_info, alarm_clock + music_library, + speaker_info, + current_track_info_empty, + battery_info, + alarm_clock, + sonos_playlists, + sonos_queue=sonos_queue, ) with ( patch("homeassistant.components.sonos.SoCo", new=factory.get_mock), @@ -292,7 +332,13 @@ def silent_ssdp_scanner() -> Generator[None]: def discover_fixture(soco): """Create a mock soco discover fixture.""" - def do_callback(hass, callback, *args, **kwargs): + def do_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: callback( ssdp.SsdpServiceInfo( ssdp_location=f"http://{soco.ip_address}/", @@ -326,6 +372,21 @@ def sonos_favorites_fixture() -> SearchResult: return SearchResult(favorite_list, "favorites", 3, 3, 1) +@pytest.fixture(name="sonos_playlists") +def sonos_playlists_fixture() -> SearchResult: + """Create sonos playlist fixture.""" + playlists = load_json_value_fixture("sonos_playlists.json", "sonos") + playlists_list = [DidlPlaylistContainer.from_dict(pl) for pl in playlists] + return SearchResult(playlists_list, "sonos_playlists", 1, 1, 0) + + +@pytest.fixture(name="sonos_queue") +def sonos_queue() -> list[DidlMusicTrack]: + """Create sonos queue fixture.""" + queue = load_json_value_fixture("sonos_queue.json", "sonos") + return [DidlMusicTrack.from_dict(track) for track in queue] + + class MockMusicServiceItem: """Mocks a Soco MusicServiceItem.""" @@ -650,7 +711,9 @@ def zgs_discovery_fixture(): @pytest.fixture(name="fire_zgs_event") -def zgs_event_fixture(hass: HomeAssistant, soco: SoCo, zgs_discovery: str): +def zgs_event_fixture( + hass: HomeAssistant, soco: SoCo, zgs_discovery: str +) -> Callable[[], Coroutine[Any, Any, None]]: """Create alarm_event fixture.""" variables = {"ZoneGroupState": zgs_discovery} diff --git a/tests/components/sonos/fixtures/sonos_playlists.json b/tests/components/sonos/fixtures/sonos_playlists.json new file mode 100644 index 00000000000..f0731467697 --- /dev/null +++ b/tests/components/sonos/fixtures/sonos_playlists.json @@ -0,0 
+1,13 @@ +[ + { + "title": "sample playlist", + "parent_id": "SQ:", + "item_id": "SQ:0", + "resources": [ + { + "uri": "file:///jffs/settings/savedqueues.rsq#0", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] + } +] diff --git a/tests/components/sonos/fixtures/sonos_queue.json b/tests/components/sonos/fixtures/sonos_queue.json new file mode 100644 index 00000000000..50689a00e1d --- /dev/null +++ b/tests/components/sonos/fixtures/sonos_queue.json @@ -0,0 +1,30 @@ +[ + { + "title": "Something", + "album": "Abbey Road", + "creator": "The Beatles", + "item_id": "Q:0/1", + "parent_id": "Q:0", + "original_track_number": 3, + "resources": [ + { + "uri": "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] + }, + { + "title": "Come Together", + "album": "Abbey Road", + "creator": "The Beatles", + "item_id": "Q:0/2", + "parent_id": "Q:0", + "original_track_number": 1, + "resources": [ + { + "uri": "x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3", + "protocol_info": "file:*:audio/mpegurl:*" + } + ] + } +] diff --git a/tests/components/sonos/snapshots/test_media_player.ambr b/tests/components/sonos/snapshots/test_media_player.ambr new file mode 100644 index 00000000000..f382d341de6 --- /dev/null +++ b/tests/components/sonos/snapshots/test_media_player.ambr @@ -0,0 +1,76 @@ +# serializer version: 1 +# name: test_entity_basic[media_player.zone_a-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'media_player', + 'entity_category': None, + 'entity_id': 'media_player.zone_a', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': None, + 'platform': 'sonos', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': None, + 'unique_id': 'RINCON_test', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_basic[media_player.zone_a-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'speaker', + 'friendly_name': 'Zone A', + 'group_members': list([ + 'media_player.zone_a', + ]), + 'is_volume_muted': False, + 'media_content_type': , + 'repeat': , + 'shuffle': False, + 'supported_features': , + 'volume_level': 0.19, + }), + 'context': , + 'entity_id': 'media_player.zone_a', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'idle', + }) +# --- +# name: test_media_get_queue + dict({ + 'media_player.zone_a': list([ + dict({ + 'media_album_name': 'Abbey Road', + 'media_artist': 'The Beatles', + 'media_content_id': 'x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/03%20Something.mp3', + 'media_title': 'Something', + }), + dict({ + 'media_album_name': 'Abbey Road', + 'media_artist': 'The Beatles', + 'media_content_id': 'x-file-cifs://192.168.42.10/music/The%20Beatles/Abbey%20Road/01%20Come%20Together.mp3', + 'media_title': 'Come Together', + }), + ]), + }) +# --- diff --git a/tests/components/sonos/test_media_player.py b/tests/components/sonos/test_media_player.py index c765ed82ac6..ac877f47904 100644 --- a/tests/components/sonos/test_media_player.py +++ b/tests/components/sonos/test_media_player.py @@ -1,17 +1,29 @@ """Tests for the Sonos Media Player platform.""" -import logging from typing 
import Any from unittest.mock import patch import pytest +from soco.data_structures import SearchResult +from sonos_websocket.exception import SonosWebsocketError +from syrupy import SnapshotAssertion from homeassistant.components.media_player import ( + ATTR_INPUT_SOURCE, + ATTR_MEDIA_ANNOUNCE, + ATTR_MEDIA_CONTENT_ID, + ATTR_MEDIA_CONTENT_TYPE, ATTR_MEDIA_ENQUEUE, + ATTR_MEDIA_EXTRA, + ATTR_MEDIA_REPEAT, + ATTR_MEDIA_SHUFFLE, + ATTR_MEDIA_VOLUME_LEVEL, DOMAIN as MP_DOMAIN, + SERVICE_CLEAR_PLAYLIST, SERVICE_PLAY_MEDIA, SERVICE_SELECT_SOURCE, MediaPlayerEnqueue, + RepeatMode, ) from homeassistant.components.sonos.const import ( DOMAIN as SONOS_DOMAIN, @@ -20,18 +32,27 @@ from homeassistant.components.sonos.const import ( ) from homeassistant.components.sonos.media_player import ( LONG_SERVICE_TIMEOUT, + SERVICE_GET_QUEUE, SERVICE_RESTORE, SERVICE_SNAPSHOT, VOLUME_INCREMENT, ) from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_MEDIA_NEXT_TRACK, + SERVICE_MEDIA_PAUSE, + SERVICE_MEDIA_PLAY, + SERVICE_MEDIA_PREVIOUS_TRACK, + SERVICE_MEDIA_STOP, + SERVICE_REPEAT_SET, + SERVICE_SHUFFLE_SET, SERVICE_VOLUME_DOWN, SERVICE_VOLUME_SET, SERVICE_VOLUME_UP, - STATE_IDLE, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import ServiceValidationError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError +from homeassistant.helpers import entity_registry as er from homeassistant.helpers.device_registry import ( CONNECTION_NETWORK_MAC, CONNECTION_UPNP, @@ -39,7 +60,7 @@ from homeassistant.helpers.device_registry import ( ) from homeassistant.setup import async_setup_component -from .conftest import MockMusicServiceItem, MockSoCo, SoCoMockFactory +from .conftest import MockMusicServiceItem, MockSoCo, SoCoMockFactory, SonosMockEvent async def test_device_registry( @@ -77,15 +98,18 @@ async def test_device_registry_not_portable( async def test_entity_basic( - hass: HomeAssistant, async_autosetup_sonos, discover + hass: HomeAssistant, + async_autosetup_sonos, + discover, + entity_registry: er.EntityRegistry, + snapshot: SnapshotAssertion, ) -> None: """Test basic state and attributes.""" - state = hass.states.get("media_player.zone_a") - assert state.state == STATE_IDLE - attributes = state.attributes - assert attributes["friendly_name"] == "Zone A" - assert attributes["is_volume_muted"] is False - assert attributes["volume_level"] == 0.19 + entity_id = "media_player.zone_a" + entity_entry = entity_registry.async_get(entity_id) + assert entity_entry == snapshot(name=f"{entity_entry.entity_id}-entry") + state = hass.states.get(entity_entry.entity_id) + assert state == snapshot(name=f"{entity_entry.entity_id}-state") @pytest.mark.parametrize( @@ -173,9 +197,9 @@ async def test_play_media_library( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": media_content_type, - "media_content_id": media_content_id, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: media_content_type, + ATTR_MEDIA_CONTENT_ID: media_content_id, ATTR_MEDIA_ENQUEUE: enqueue, }, blocking=True, @@ -222,9 +246,9 @@ async def test_play_media_lib_track_play( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, }, blocking=True, @@ -251,9 +275,9 @@ async def 
test_play_media_lib_track_next( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, }, blocking=True, @@ -279,9 +303,9 @@ async def test_play_media_lib_track_replace( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.REPLACE, }, blocking=True, @@ -302,9 +326,9 @@ async def test_play_media_lib_track_add( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "track", - "media_content_id": _track_url, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "track", + ATTR_MEDIA_CONTENT_ID: _track_url, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, }, blocking=True, @@ -332,9 +356,9 @@ async def test_play_media_share_link_add( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, }, blocking=True, @@ -360,9 +384,9 @@ async def test_play_media_share_link_next( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.NEXT, }, blocking=True, @@ -392,9 +416,9 @@ async def test_play_media_share_link_play( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, }, blocking=True, @@ -426,9 +450,9 @@ async def test_play_media_share_link_replace( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": _share_link, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: _share_link, ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.REPLACE, }, blocking=True, @@ -491,9 +515,9 @@ async def test_play_media_music_library_playlist( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": media_content_id, + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "playlist", + ATTR_MEDIA_CONTENT_ID: media_content_id, }, blocking=True, ) @@ -515,21 +539,68 @@ async def test_play_media_music_library_playlist_dne( soco_mock = soco_factory.mock_list.get("192.168.42.2") soco_mock.music_library.get_playlists.return_value = _mock_playlists - with caplog.at_level(logging.ERROR): - caplog.clear() + with pytest.raises( + ServiceValidationError, + match=f"Could not find Sonos playlist: {media_content_id}", + ): await hass.services.async_call( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "playlist", - "media_content_id": 
media_content_id,
+ ATTR_ENTITY_ID: "media_player.zone_a",
+ ATTR_MEDIA_CONTENT_TYPE: "playlist",
+ ATTR_MEDIA_CONTENT_ID: media_content_id,
 },
 blocking=True,
 )
 assert soco_mock.play_uri.call_count == 0
- assert media_content_id in caplog.text
- assert "playlist" in caplog.text
+
+
+async def test_play_sonos_playlist(
+ hass: HomeAssistant,
+ async_autosetup_sonos,
+ soco: MockSoCo,
+ sonos_playlists: SearchResult,
+) -> None:
+ """Test that sonos playlists can be played."""
+
+ # Test a successful call
+ await hass.services.async_call(
+ MP_DOMAIN,
+ SERVICE_PLAY_MEDIA,
+ {
+ ATTR_ENTITY_ID: "media_player.zone_a",
+ ATTR_MEDIA_CONTENT_TYPE: "playlist",
+ ATTR_MEDIA_CONTENT_ID: "sample playlist",
+ },
+ blocking=True,
+ )
+ assert soco.clear_queue.call_count == 1
+ assert soco.add_to_queue.call_count == 1
+ soco.add_to_queue.assert_called_with(
+ sonos_playlists[0], timeout=LONG_SERVICE_TIMEOUT
+ )
+
+ # Test playing a non-existent playlist
+ soco.clear_queue.reset_mock()
+ soco.add_to_queue.reset_mock()
+ media_content_id: str = "bad playlist"
+ with pytest.raises(
+ ServiceValidationError,
+ match=f"Could not find Sonos playlist: {media_content_id}",
+ ):
+ await hass.services.async_call(
+ MP_DOMAIN,
+ SERVICE_PLAY_MEDIA,
+ {
+ ATTR_ENTITY_ID: "media_player.zone_a",
+ ATTR_MEDIA_CONTENT_TYPE: "playlist",
+ ATTR_MEDIA_CONTENT_ID: media_content_id,
+ },
+ blocking=True,
+ )
+ assert soco.clear_queue.call_count == 0
+ assert soco.add_to_queue.call_count == 0
@@ -562,8 +633,8 @@ async def test_select_source_line_in_tv(
 MP_DOMAIN,
 SERVICE_SELECT_SOURCE,
 {
- "entity_id": "media_player.zone_a",
- "source": source,
+ ATTR_ENTITY_ID: "media_player.zone_a",
+ ATTR_INPUT_SOURCE: source,
 },
 blocking=True,
 )
@@ -605,8 +676,8 @@ async def test_select_source_play_uri(
 MP_DOMAIN,
 SERVICE_SELECT_SOURCE,
 {
- "entity_id": "media_player.zone_a",
- "source": source,
+ ATTR_ENTITY_ID: "media_player.zone_a",
+ ATTR_INPUT_SOURCE: source,
 },
 blocking=True,
 )
@@ -645,8 +716,8 @@ async def test_select_source_play_queue(
 MP_DOMAIN,
 SERVICE_SELECT_SOURCE,
 {
- "entity_id": "media_player.zone_a",
- "source": source,
+ ATTR_ENTITY_ID: "media_player.zone_a",
+ ATTR_INPUT_SOURCE: source,
 },
 blocking=True,
 )
@@ -674,8 +745,8 @@ async def test_select_source_error(
 MP_DOMAIN,
 SERVICE_SELECT_SOURCE,
 {
- "entity_id": "media_player.zone_a",
- "source": "invalid_source",
+ ATTR_ENTITY_ID: "media_player.zone_a",
+ ATTR_INPUT_SOURCE: "invalid_source",
 },
 blocking=True,
 )
@@ -683,6 +754,147 @@ async def test_select_source_error(
 assert "Could not find a Sonos favorite" in str(sve.value)
+
+async def test_shuffle_set(
+ hass: HomeAssistant,
+ soco: MockSoCo,
+ async_autosetup_sonos,
+) -> None:
+ """Test the set shuffle method."""
+ assert soco.play_mode == "NORMAL"
+
+ await hass.services.async_call(
+ MP_DOMAIN,
+ SERVICE_SHUFFLE_SET,
+ {
+ ATTR_ENTITY_ID: "media_player.zone_a",
+ ATTR_MEDIA_SHUFFLE: True,
+ },
+ blocking=True,
+ )
+ assert soco.play_mode == "SHUFFLE_NOREPEAT"
+
+ await hass.services.async_call(
+ MP_DOMAIN,
+ SERVICE_SHUFFLE_SET,
+ {
+ ATTR_ENTITY_ID: "media_player.zone_a",
+ ATTR_MEDIA_SHUFFLE: False,
+ },
+ blocking=True,
+ )
+ assert soco.play_mode == "NORMAL"
+
+
+async def test_shuffle_get(
+ hass: HomeAssistant,
+ soco: MockSoCo,
+ async_autosetup_sonos,
+ no_media_event: SonosMockEvent,
+) -> None:
+ """Test the get shuffle attribute by simulating a Sonos Event."""
+ subscription = soco.avTransport.subscribe.return_value
+ sub_callback = subscription.callback
+
+ state = 
hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_SHUFFLE] is False + + no_media_event.variables["current_play_mode"] = "SHUFFLE_NOREPEAT" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_SHUFFLE] is True + + # The integration keeps a copy of the last event to check for + # changes, so we create a new event. + no_media_event = SonosMockEvent( + soco, soco.avTransport, no_media_event.variables.copy() + ) + no_media_event.variables["current_play_mode"] = "NORMAL" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_SHUFFLE] is False + + +async def test_repeat_set( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, +) -> None: + """Test the set repeat method.""" + assert soco.play_mode == "NORMAL" + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_REPEAT: RepeatMode.ALL, + }, + blocking=True, + ) + assert soco.play_mode == "REPEAT_ALL" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_REPEAT: RepeatMode.ONE, + }, + blocking=True, + ) + assert soco.play_mode == "REPEAT_ONE" + + await hass.services.async_call( + MP_DOMAIN, + SERVICE_REPEAT_SET, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_REPEAT: RepeatMode.OFF, + }, + blocking=True, + ) + assert soco.play_mode == "NORMAL" + + +async def test_repeat_get( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + no_media_event: SonosMockEvent, +) -> None: + """Test the get repeat attribute by simulating a Sonos Event.""" + subscription = soco.avTransport.subscribe.return_value + sub_callback = subscription.callback + + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.OFF + + no_media_event.variables["current_play_mode"] = "REPEAT_ALL" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ALL + + no_media_event = SonosMockEvent( + soco, soco.avTransport, no_media_event.variables.copy() + ) + no_media_event.variables["current_play_mode"] = "REPEAT_ONE" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.ONE + + no_media_event = SonosMockEvent( + soco, soco.avTransport, no_media_event.variables.copy() + ) + no_media_event.variables["current_play_mode"] = "NORMAL" + sub_callback(no_media_event) + await hass.async_block_till_done(wait_background_tasks=True) + state = hass.states.get("media_player.zone_a") + assert state.attributes[ATTR_MEDIA_REPEAT] == RepeatMode.OFF + + async def test_play_media_favorite_item_id( hass: HomeAssistant, soco_factory: SoCoMockFactory, @@ -694,9 +906,9 @@ async def test_play_media_favorite_item_id( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "favorite_item_id", - "media_content_id": "FV:2/4", + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "favorite_item_id", + ATTR_MEDIA_CONTENT_ID: "FV:2/4", }, blocking=True, ) @@ -716,9 +928,9 @@ 
async def test_play_media_favorite_item_id( MP_DOMAIN, SERVICE_PLAY_MEDIA, { - "entity_id": "media_player.zone_a", - "media_content_type": "favorite_item_id", - "media_content_id": "UNKNOWN_ID", + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "favorite_item_id", + ATTR_MEDIA_CONTENT_ID: "UNKNOWN_ID", }, blocking=True, ) @@ -756,7 +968,7 @@ async def test_service_snapshot_restore( SONOS_DOMAIN, SERVICE_SNAPSHOT, { - "entity_id": ["media_player.living_room", "media_player.bedroom"], + ATTR_ENTITY_ID: ["media_player.living_room", "media_player.bedroom"], }, blocking=True, ) @@ -769,7 +981,7 @@ async def test_service_snapshot_restore( SONOS_DOMAIN, SERVICE_RESTORE, { - "entity_id": ["media_player.living_room", "media_player.bedroom"], + ATTR_ENTITY_ID: ["media_player.living_room", "media_player.bedroom"], }, blocking=True, ) @@ -788,7 +1000,7 @@ async def test_volume( MP_DOMAIN, SERVICE_VOLUME_UP, { - "entity_id": "media_player.zone_a", + ATTR_ENTITY_ID: "media_player.zone_a", }, blocking=True, ) @@ -798,7 +1010,7 @@ async def test_volume( MP_DOMAIN, SERVICE_VOLUME_DOWN, { - "entity_id": "media_player.zone_a", + ATTR_ENTITY_ID: "media_player.zone_a", }, blocking=True, ) @@ -807,8 +1019,128 @@ async def test_volume( await hass.services.async_call( MP_DOMAIN, SERVICE_VOLUME_SET, - {"entity_id": "media_player.zone_a", "volume_level": 0.30}, + {ATTR_ENTITY_ID: "media_player.zone_a", ATTR_MEDIA_VOLUME_LEVEL: 0.30}, blocking=True, ) # SoCo uses 0..100 for its range. assert soco.volume == 30 + + +@pytest.mark.parametrize( + ("service", "client_call"), + [ + (SERVICE_MEDIA_PLAY, "play"), + (SERVICE_MEDIA_PAUSE, "pause"), + (SERVICE_MEDIA_STOP, "stop"), + (SERVICE_MEDIA_NEXT_TRACK, "next"), + (SERVICE_MEDIA_PREVIOUS_TRACK, "previous"), + (SERVICE_CLEAR_PLAYLIST, "clear_queue"), + ], +) +async def test_media_transport( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + service: str, + client_call: str, +) -> None: + """Test the media player transport services.""" + await hass.services.async_call( + MP_DOMAIN, + service, + { + ATTR_ENTITY_ID: "media_player.zone_a", + }, + blocking=True, + ) + assert getattr(soco, client_call).call_count == 1 + + +async def test_play_media_announce( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + sonos_websocket, +) -> None: + """Test playing media with the announce.""" + content_id: str = "http://10.0.0.1:8123/local/sounds/doorbell.mp3" + volume: float = 0.30 + + # Test the success path + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_CONTENT_ID: content_id, + ATTR_MEDIA_ANNOUNCE: True, + ATTR_MEDIA_EXTRA: {"volume": volume}, + }, + blocking=True, + ) + assert sonos_websocket.play_clip.call_count == 1 + sonos_websocket.play_clip.assert_called_with(content_id, volume=volume) + + # Test receiving a websocket exception + sonos_websocket.play_clip.reset_mock() + sonos_websocket.play_clip.side_effect = SonosWebsocketError("Error Message") + with pytest.raises( + HomeAssistantError, match="Error when calling Sonos websocket: Error Message" + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_CONTENT_ID: content_id, + ATTR_MEDIA_ANNOUNCE: True, + }, + blocking=True, + ) + assert sonos_websocket.play_clip.call_count == 1 + sonos_websocket.play_clip.assert_called_with(content_id, volume=None) + + # 
Test receiving a non success result + sonos_websocket.play_clip.reset_mock() + sonos_websocket.play_clip.side_effect = None + retval = {"success": 0} + sonos_websocket.play_clip.return_value = [retval, {}] + with pytest.raises( + HomeAssistantError, match=f"Announcing clip {content_id} failed {retval}" + ): + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: "media_player.zone_a", + ATTR_MEDIA_CONTENT_TYPE: "music", + ATTR_MEDIA_CONTENT_ID: content_id, + ATTR_MEDIA_ANNOUNCE: True, + }, + blocking=True, + ) + assert sonos_websocket.play_clip.call_count == 1 + + +async def test_media_get_queue( + hass: HomeAssistant, + soco: MockSoCo, + async_autosetup_sonos, + soco_factory, + snapshot: SnapshotAssertion, +) -> None: + """Test getting the media queue.""" + soco_mock = soco_factory.mock_list.get("192.168.42.2") + result = await hass.services.async_call( + SONOS_DOMAIN, + SERVICE_GET_QUEUE, + { + ATTR_ENTITY_ID: "media_player.zone_a", + }, + blocking=True, + return_response=True, + ) + soco_mock.get_queue.assert_called_with(max_items=0) + assert result == snapshot diff --git a/tests/components/sonos/test_plex_playback.py b/tests/components/sonos/test_plex_playback.py index 428e970697e..01a66f640d5 100644 --- a/tests/components/sonos/test_plex_playback.py +++ b/tests/components/sonos/test_plex_playback.py @@ -8,17 +8,24 @@ import pytest from homeassistant.components.media_player import ( ATTR_MEDIA_CONTENT_ID, ATTR_MEDIA_CONTENT_TYPE, + ATTR_MEDIA_ENQUEUE, DOMAIN as MP_DOMAIN, SERVICE_PLAY_MEDIA, + MediaPlayerEnqueue, MediaType, ) from homeassistant.components.plex import DOMAIN as PLEX_DOMAIN, PLEX_URI_SCHEME +from homeassistant.components.sonos.media_player import LONG_SERVICE_TIMEOUT from homeassistant.const import ATTR_ENTITY_ID from homeassistant.core import HomeAssistant from homeassistant.exceptions import HomeAssistantError +from .conftest import MockSoCo -async def test_plex_play_media(hass: HomeAssistant, async_autosetup_sonos) -> None: + +async def test_plex_play_media( + hass: HomeAssistant, soco: MockSoCo, async_autosetup_sonos +) -> None: """Test playing media via the Plex integration.""" mock_plex_server = Mock() mock_lookup = mock_plex_server.lookup_media @@ -55,6 +62,9 @@ async def test_plex_play_media(hass: HomeAssistant, async_autosetup_sonos) -> No assert not mock_shuffle.called assert mock_lookup.mock_calls[0][1][0] == MediaType.MUSIC assert mock_lookup.mock_calls[0][2] == json.loads(media_content_id) + assert soco.clear_queue.call_count == 1 + assert soco.play_from_queue.call_count == 1 + soco.play_from_queue.assert_called_with(0) # Test handling shuffle in payload mock_lookup.reset_mock() @@ -130,3 +140,41 @@ async def test_plex_play_media(hass: HomeAssistant, async_autosetup_sonos) -> No assert mock_shuffle.called assert mock_lookup.mock_calls[0][1][0] == PLEX_DOMAIN assert mock_lookup.mock_calls[0][2] == {"plex_key": plex_item_key} + + mock_add_to_queue.reset_mock() + await hass.services.async_call( + MP_DOMAIN, + SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: media_player, + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_CONTENT_ID: f"{PLEX_URI_SCHEME}{media_content_id}", + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.ADD, + }, + blocking=True, + ) + assert mock_add_to_queue.call_count == 1 + mock_add_to_queue.assert_called_with( + mock_lookup(), timeout=LONG_SERVICE_TIMEOUT + ) + + soco.play_from_queue.reset_mock() + mock_add_to_queue.reset_mock() + mock_add_to_queue.return_value = 9 + await hass.services.async_call( + MP_DOMAIN, + 
SERVICE_PLAY_MEDIA, + { + ATTR_ENTITY_ID: media_player, + ATTR_MEDIA_CONTENT_TYPE: MediaType.MUSIC, + ATTR_MEDIA_CONTENT_ID: f"{PLEX_URI_SCHEME}{media_content_id}", + ATTR_MEDIA_ENQUEUE: MediaPlayerEnqueue.PLAY, + }, + blocking=True, + ) + assert mock_add_to_queue.call_count == 1 + mock_add_to_queue.assert_called_with( + mock_lookup(), position=1, timeout=LONG_SERVICE_TIMEOUT + ) + assert soco.play_from_queue.call_count == 1 + soco.play_from_queue.assert_called_with(mock_add_to_queue.return_value - 1) diff --git a/tests/components/spc/conftest.py b/tests/components/spc/conftest.py new file mode 100644 index 00000000000..1ccda31e314 --- /dev/null +++ b/tests/components/spc/conftest.py @@ -0,0 +1,26 @@ +"""Tests for Vanderbilt SPC component.""" + +from collections.abc import Generator +from unittest.mock import AsyncMock, patch + +import pyspcwebgw +import pytest + + +@pytest.fixture +def mock_client() -> Generator[AsyncMock]: + """Mock the SPC client.""" + + with patch( + "homeassistant.components.spc.SpcWebGateway", autospec=True + ) as mock_client: + client = mock_client.return_value + client.async_load_parameters.return_value = True + mock_area = AsyncMock(spec=pyspcwebgw.area.Area) + mock_area.id = "1" + mock_area.mode = pyspcwebgw.const.AreaMode.FULL_SET + mock_area.last_changed_by = "Sven" + mock_area.name = "House" + mock_area.verified_alarm = False + client.areas = {"1": mock_area} + yield mock_client diff --git a/tests/components/spc/test_alarm_control_panel.py b/tests/components/spc/test_alarm_control_panel.py new file mode 100644 index 00000000000..7b1ab4ff947 --- /dev/null +++ b/tests/components/spc/test_alarm_control_panel.py @@ -0,0 +1,34 @@ +"""Tests for Vanderbilt SPC component.""" + +from unittest.mock import AsyncMock + +from pyspcwebgw.const import AreaMode + +from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + + +async def test_update_alarm_device(hass: HomeAssistant, mock_client: AsyncMock) -> None: + """Test that alarm panel state changes on incoming websocket data.""" + + config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} + assert await async_setup_component(hass, "spc", config) is True + + await hass.async_block_till_done() + + entity_id = "alarm_control_panel.house" + + assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY + assert hass.states.get(entity_id).attributes["changed_by"] == "Sven" + + mock_area = mock_client.return_value.areas["1"] + + mock_area.mode = AreaMode.UNSET + mock_area.last_changed_by = "Anna" + + await mock_client.call_args_list[0][1]["async_callback"](mock_area) + await hass.async_block_till_done() + + assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED + assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" diff --git a/tests/components/spc/test_init.py b/tests/components/spc/test_init.py index 4f335e2f980..dc407dc2c5b 100644 --- a/tests/components/spc/test_init.py +++ b/tests/components/spc/test_init.py @@ -1,73 +1,22 @@ """Tests for Vanderbilt SPC component.""" -from unittest.mock import Mock, PropertyMock, patch +from unittest.mock import AsyncMock -import pyspcwebgw -from pyspcwebgw.const import AreaMode - -from homeassistant.components.spc import DATA_API -from homeassistant.const import STATE_ALARM_ARMED_AWAY, STATE_ALARM_DISARMED from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component -async def 
test_valid_device_config(hass: HomeAssistant) -> None: +async def test_valid_device_config(hass: HomeAssistant, mock_client: AsyncMock) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} - with patch( - "homeassistant.components.spc.SpcWebGateway.async_load_parameters", - return_value=True, - ): - assert await async_setup_component(hass, "spc", config) is True + assert await async_setup_component(hass, "spc", config) is True -async def test_invalid_device_config(hass: HomeAssistant) -> None: +async def test_invalid_device_config( + hass: HomeAssistant, mock_client: AsyncMock +) -> None: """Test valid device config.""" config = {"spc": {"api_url": "http://localhost/"}} - with patch( - "homeassistant.components.spc.SpcWebGateway.async_load_parameters", - return_value=True, - ): - assert await async_setup_component(hass, "spc", config) is False - - -async def test_update_alarm_device(hass: HomeAssistant) -> None: - """Test that alarm panel state changes on incoming websocket data.""" - - config = {"spc": {"api_url": "http://localhost/", "ws_url": "ws://localhost/"}} - - area_mock = Mock( - spec=pyspcwebgw.area.Area, - id="1", - mode=AreaMode.FULL_SET, - last_changed_by="Sven", - ) - area_mock.name = "House" - area_mock.verified_alarm = False - - with patch( - "homeassistant.components.spc.SpcWebGateway.areas", new_callable=PropertyMock - ) as mock_areas: - mock_areas.return_value = {"1": area_mock} - with patch( - "homeassistant.components.spc.SpcWebGateway.async_load_parameters", - return_value=True, - ): - assert await async_setup_component(hass, "spc", config) is True - - await hass.async_block_till_done() - - entity_id = "alarm_control_panel.house" - - assert hass.states.get(entity_id).state == STATE_ALARM_ARMED_AWAY - assert hass.states.get(entity_id).attributes["changed_by"] == "Sven" - - area_mock.mode = AreaMode.UNSET - area_mock.last_changed_by = "Anna" - await hass.data[DATA_API]._async_callback(area_mock) - await hass.async_block_till_done() - - assert hass.states.get(entity_id).state == STATE_ALARM_DISARMED - assert hass.states.get(entity_id).attributes["changed_by"] == "Anna" + assert await async_setup_component(hass, "spc", config) is False diff --git a/tests/components/spotify/test_config_flow.py b/tests/components/spotify/test_config_flow.py index 6040fcd84f2..09feb4a6e83 100644 --- a/tests/components/spotify/test_config_flow.py +++ b/tests/components/spotify/test_config_flow.py @@ -13,7 +13,7 @@ from homeassistant.components.application_credentials import ( async_import_client_credential, ) from homeassistant.components.spotify.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER, SOURCE_ZEROCONF +from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -201,15 +201,7 @@ async def test_reauthentication( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -269,15 +261,7 @@ async def test_reauth_account_mismatch( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( 
- DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) diff --git a/tests/components/ssdp/conftest.py b/tests/components/ssdp/conftest.py index 8b06163cd95..ac0ac7298a8 100644 --- a/tests/components/ssdp/conftest.py +++ b/tests/components/ssdp/conftest.py @@ -1,11 +1,14 @@ """Configuration for SSDP tests.""" +from collections.abc import Generator from unittest.mock import AsyncMock, patch from async_upnp_client.server import UpnpServer from async_upnp_client.ssdp_listener import SsdpListener import pytest +from homeassistant.core import HomeAssistant + @pytest.fixture(autouse=True) async def silent_ssdp_listener(): @@ -32,7 +35,7 @@ async def disabled_upnp_server(): @pytest.fixture -def mock_flow_init(hass): +def mock_flow_init(hass: HomeAssistant) -> Generator[AsyncMock]: """Mock hass.config_entries.flow.async_init.""" with patch.object( hass.config_entries.flow, "async_init", return_value=AsyncMock() diff --git a/tests/components/steam_online/test_config_flow.py b/tests/components/steam_online/test_config_flow.py index a5bce80d890..140a8309ff9 100644 --- a/tests/components/steam_online/test_config_flow.py +++ b/tests/components/steam_online/test_config_flow.py @@ -5,8 +5,8 @@ from unittest.mock import patch import steam from homeassistant.components.steam_online.const import CONF_ACCOUNTS, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER -from homeassistant.const import CONF_API_KEY, CONF_SOURCE +from homeassistant.config_entries import SOURCE_USER +from homeassistant.const import CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import entity_registry as er @@ -111,18 +111,10 @@ async def test_flow_user_already_configured(hass: HomeAssistant) -> None: async def test_flow_reauth(hass: HomeAssistant) -> None: """Test reauth step.""" entry = create_entry(hass) + result = await entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with patch_interface(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={}, diff --git a/tests/components/stream/conftest.py b/tests/components/stream/conftest.py index 0142d71a805..39e4de13fed 100644 --- a/tests/components/stream/conftest.py +++ b/tests/components/stream/conftest.py @@ -16,6 +16,7 @@ import asyncio from collections.abc import Generator import logging import threading +from typing import Any from unittest.mock import Mock, patch from aiohttp import web @@ -32,7 +33,7 @@ TEST_TIMEOUT = 7.0 # Lower than 9s home assistant timeout class WorkerSync: """Test fixture that intercepts stream worker calls to StreamOutput.""" - def __init__(self): + def __init__(self) -> None: """Initialize WorkerSync.""" self._event = None self._original = StreamState.discontinuity @@ -60,7 +61,7 @@ class WorkerSync: @pytest.fixture -def stream_worker_sync(hass): +def stream_worker_sync() -> 
Generator[WorkerSync]: """Patch StreamOutput to allow test to synchronize worker stream end.""" sync = WorkerSync() with patch( @@ -74,7 +75,7 @@ def stream_worker_sync(hass): class HLSSync: """Test fixture that intercepts stream worker calls to StreamOutput.""" - def __init__(self): + def __init__(self) -> None: """Initialize HLSSync.""" self._request_event = asyncio.Event() self._original_recv = StreamOutput.recv @@ -91,7 +92,7 @@ class HLSSync: self.check_requests_ready() class SyncResponse(web.Response): - def __init__(self, *args, **kwargs) -> None: + def __init__(self, *args: Any, **kwargs: Any) -> None: super().__init__(*args, **kwargs) on_resp() diff --git a/tests/components/stream/test_hls.py b/tests/components/stream/test_hls.py index ce66848a2b1..babd7c0b748 100644 --- a/tests/components/stream/test_hls.py +++ b/tests/components/stream/test_hls.py @@ -54,7 +54,7 @@ async def setup_component(hass: HomeAssistant) -> None: class HlsClient: """Test fixture for fetching the hls stream.""" - def __init__(self, http_client, parsed_url): + def __init__(self, http_client, parsed_url) -> None: """Initialize HlsClient.""" self.http_client = http_client self.parsed_url = parsed_url diff --git a/tests/components/stream/test_worker.py b/tests/components/stream/test_worker.py index a96866eac4b..73c51087ef1 100644 --- a/tests/components/stream/test_worker.py +++ b/tests/components/stream/test_worker.py @@ -100,7 +100,7 @@ def mock_stream_settings(hass: HomeAssistant) -> None: class FakeAvInputStream: """A fake pyav Stream.""" - def __init__(self, name, time_base): + def __init__(self, name, time_base) -> None: """Initialize the stream.""" self.name = name self.time_base = time_base @@ -142,7 +142,7 @@ class PacketSequence: exercise corner cases. """ - def __init__(self, num_packets): + def __init__(self, num_packets) -> None: """Initialize the sequence with the number of packets it provides.""" self.packet = 0 self.num_packets = num_packets @@ -160,7 +160,7 @@ class PacketSequence: class FakePacket(bytearray): # Be a bytearray so that memoryview works - def __init__(self): + def __init__(self) -> None: super().__init__(3) time_base = VIDEO_TIME_BASE @@ -181,7 +181,7 @@ class PacketSequence: class FakePyAvContainer: """A fake container returned by mock av.open for a stream.""" - def __init__(self, video_stream, audio_stream): + def __init__(self, video_stream, audio_stream) -> None: """Initialize the fake container.""" # Tests can override this to trigger different worker behavior self.packets = PacketSequence(0) @@ -209,7 +209,7 @@ class FakePyAvContainer: class FakePyAvBuffer: """Holds outputs of the decoded stream for tests to assert on results.""" - def __init__(self): + def __init__(self) -> None: """Initialize the FakePyAvBuffer.""" self.segments = [] self.audio_packets = [] @@ -220,7 +220,7 @@ class FakePyAvBuffer: """Create an output buffer that captures packets for test to examine.""" class FakeAvOutputStream: - def __init__(self, capture_packets): + def __init__(self, capture_packets) -> None: self.capture_packets = capture_packets self.type = "ignored-type" @@ -266,7 +266,7 @@ class FakePyAvBuffer: class MockPyAv: """Mocks out av.open.""" - def __init__(self, video=True, audio=False): + def __init__(self, video=True, audio=False) -> None: """Initialize the MockPyAv.""" video_stream = VIDEO_STREAM if video else None audio_stream = AUDIO_STREAM if audio else None @@ -283,7 +283,12 @@ class MockPyAv: return self.container -def run_worker(hass, stream, stream_source, 
stream_settings=None): +def run_worker( + hass: HomeAssistant, + stream: Stream, + stream_source: str, + stream_settings: StreamSettings | None = None, +) -> None: """Run the stream worker under test.""" stream_state = StreamState(hass, stream.outputs, stream._diagnostics) stream_worker( @@ -296,7 +301,12 @@ def run_worker(hass, stream, stream_source, stream_settings=None): ) -async def async_decode_stream(hass, packets, py_av=None, stream_settings=None): +async def async_decode_stream( + hass: HomeAssistant, + packets: PacketSequence, + py_av: MockPyAv | None = None, + stream_settings: StreamSettings | None = None, +) -> FakePyAvBuffer: """Start a stream worker that decodes incoming stream packets into output segments.""" stream = Stream( hass, diff --git a/tests/components/stt/common.py b/tests/components/stt/common.py index e6c36c5b350..f964fca6b67 100644 --- a/tests/components/stt/common.py +++ b/tests/components/stt/common.py @@ -2,11 +2,22 @@ from __future__ import annotations -from collections.abc import Callable, Coroutine +from collections.abc import AsyncIterable, Callable, Coroutine from pathlib import Path from typing import Any -from homeassistant.components.stt import Provider +from homeassistant.components.stt import ( + AudioBitRates, + AudioChannels, + AudioCodecs, + AudioFormats, + AudioSampleRates, + Provider, + SpeechMetadata, + SpeechResult, + SpeechResultState, + SpeechToTextEntity, +) from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers.entity_platform import AddEntitiesCallback @@ -14,6 +25,80 @@ from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from tests.common import MockPlatform, mock_platform +TEST_DOMAIN = "test" + + +class BaseProvider: + """Mock STT provider.""" + + fail_process_audio = False + + def __init__( + self, *, supported_languages: list[str] | None = None, text: str = "test_result" + ) -> None: + """Init test provider.""" + self._supported_languages = supported_languages or ["de", "de-CH", "en"] + self.calls: list[tuple[SpeechMetadata, AsyncIterable[bytes]]] = [] + self.received: list[bytes] = [] + self.text = text + + @property + def supported_languages(self) -> list[str]: + """Return a list of supported languages.""" + return self._supported_languages + + @property + def supported_formats(self) -> list[AudioFormats]: + """Return a list of supported formats.""" + return [AudioFormats.WAV, AudioFormats.OGG] + + @property + def supported_codecs(self) -> list[AudioCodecs]: + """Return a list of supported codecs.""" + return [AudioCodecs.PCM, AudioCodecs.OPUS] + + @property + def supported_bit_rates(self) -> list[AudioBitRates]: + """Return a list of supported bitrates.""" + return [AudioBitRates.BITRATE_16] + + @property + def supported_sample_rates(self) -> list[AudioSampleRates]: + """Return a list of supported samplerates.""" + return [AudioSampleRates.SAMPLERATE_16000] + + @property + def supported_channels(self) -> list[AudioChannels]: + """Return a list of supported channels.""" + return [AudioChannels.CHANNEL_MONO] + + async def async_process_audio_stream( + self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] + ) -> SpeechResult: + """Process an audio stream.""" + self.calls.append((metadata, stream)) + async for data in stream: + if not data: + break + self.received.append(data) + if self.fail_process_audio: + return SpeechResult(None, SpeechResultState.ERROR) + + return SpeechResult(self.text, SpeechResultState.SUCCESS) + + +class 
MockSTTProvider(BaseProvider, Provider): + """Mock provider.""" + + url_path = TEST_DOMAIN + + +class MockSTTProviderEntity(BaseProvider, SpeechToTextEntity): + """Mock provider entity.""" + + url_path = "stt.test" + _attr_name = "test" + class MockSTTPlatform(MockPlatform): """Help to set up test stt service.""" diff --git a/tests/components/stt/test_init.py b/tests/components/stt/test_init.py index ca2685ff827..92225123995 100644 --- a/tests/components/stt/test_init.py +++ b/tests/components/stt/test_init.py @@ -1,6 +1,7 @@ """Test STT component setup.""" -from collections.abc import AsyncIterable, Generator +from collections.abc import Generator, Iterable +from contextlib import ExitStack from http import HTTPStatus from pathlib import Path from unittest.mock import AsyncMock @@ -9,16 +10,6 @@ import pytest from homeassistant.components.stt import ( DOMAIN, - AudioBitRates, - AudioChannels, - AudioCodecs, - AudioFormats, - AudioSampleRates, - Provider, - SpeechMetadata, - SpeechResult, - SpeechResultState, - SpeechToTextEntity, async_default_engine, async_get_provider, async_get_speech_to_text_engine, @@ -28,7 +19,13 @@ from homeassistant.core import HomeAssistant, State from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.setup import async_setup_component -from .common import mock_stt_entity_platform, mock_stt_platform +from .common import ( + TEST_DOMAIN, + MockSTTProvider, + MockSTTProviderEntity, + mock_stt_entity_platform, + mock_stt_platform, +) from tests.common import ( MockConfigEntry, @@ -40,102 +37,40 @@ from tests.common import ( ) from tests.typing import ClientSessionGenerator, WebSocketGenerator -TEST_DOMAIN = "test" - - -class BaseProvider: - """Mock provider.""" - - fail_process_audio = False - - def __init__(self) -> None: - """Init test provider.""" - self.calls: list[tuple[SpeechMetadata, AsyncIterable[bytes]]] = [] - - @property - def supported_languages(self) -> list[str]: - """Return a list of supported languages.""" - return ["de", "de-CH", "en"] - - @property - def supported_formats(self) -> list[AudioFormats]: - """Return a list of supported formats.""" - return [AudioFormats.WAV, AudioFormats.OGG] - - @property - def supported_codecs(self) -> list[AudioCodecs]: - """Return a list of supported codecs.""" - return [AudioCodecs.PCM, AudioCodecs.OPUS] - - @property - def supported_bit_rates(self) -> list[AudioBitRates]: - """Return a list of supported bitrates.""" - return [AudioBitRates.BITRATE_16] - - @property - def supported_sample_rates(self) -> list[AudioSampleRates]: - """Return a list of supported samplerates.""" - return [AudioSampleRates.SAMPLERATE_16000] - - @property - def supported_channels(self) -> list[AudioChannels]: - """Return a list of supported channels.""" - return [AudioChannels.CHANNEL_MONO] - - async def async_process_audio_stream( - self, metadata: SpeechMetadata, stream: AsyncIterable[bytes] - ) -> SpeechResult: - """Process an audio stream.""" - self.calls.append((metadata, stream)) - if self.fail_process_audio: - return SpeechResult(None, SpeechResultState.ERROR) - - return SpeechResult("test_result", SpeechResultState.SUCCESS) - - -class MockProvider(BaseProvider, Provider): - """Mock provider.""" - - url_path = TEST_DOMAIN - - -class MockProviderEntity(BaseProvider, SpeechToTextEntity): - """Mock provider entity.""" - - url_path = "stt.test" - _attr_name = "test" - @pytest.fixture -def mock_provider() -> MockProvider: +def mock_provider() -> MockSTTProvider: """Test provider fixture.""" - return 
MockProvider() + return MockSTTProvider() @pytest.fixture -def mock_provider_entity() -> MockProviderEntity: +def mock_provider_entity() -> MockSTTProviderEntity: """Test provider entity fixture.""" - return MockProviderEntity() + return MockSTTProviderEntity() class STTFlow(ConfigFlow): """Test flow.""" -@pytest.fixture(name="config_flow_test_domain") -def config_flow_test_domain_fixture() -> str: +@pytest.fixture(name="config_flow_test_domains") +def config_flow_test_domain_fixture() -> Iterable[str]: """Test domain fixture.""" - return TEST_DOMAIN + return (TEST_DOMAIN,) @pytest.fixture(autouse=True) def config_flow_fixture( - hass: HomeAssistant, config_flow_test_domain: str + hass: HomeAssistant, config_flow_test_domains: Iterable[str] ) -> Generator[None]: """Mock config flow.""" - mock_platform(hass, f"{config_flow_test_domain}.config_flow") + for domain in config_flow_test_domains: + mock_platform(hass, f"{domain}.config_flow") - with mock_config_flow(config_flow_test_domain, STTFlow): + with ExitStack() as stack: + for domain in config_flow_test_domains: + stack.enter_context(mock_config_flow(domain, STTFlow)) yield @@ -144,14 +79,14 @@ async def setup_fixture( hass: HomeAssistant, tmp_path: Path, request: pytest.FixtureRequest, -) -> MockProvider | MockProviderEntity: +) -> MockSTTProvider | MockSTTProviderEntity: """Set up the test environment.""" - provider: MockProvider | MockProviderEntity + provider: MockSTTProvider | MockSTTProviderEntity if request.param == "mock_setup": - provider = MockProvider() + provider = MockSTTProvider() await mock_setup(hass, tmp_path, provider) elif request.param == "mock_config_entry_setup": - provider = MockProviderEntity() + provider = MockSTTProviderEntity() await mock_config_entry_setup(hass, tmp_path, provider) else: raise RuntimeError("Invalid setup fixture") @@ -162,7 +97,7 @@ async def setup_fixture( async def mock_setup( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, ) -> None: """Set up a test provider.""" mock_stt_platform( @@ -178,7 +113,7 @@ async def mock_setup( async def mock_config_entry_setup( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockProviderEntity, + mock_provider_entity: MockSTTProviderEntity, test_domain: str = TEST_DOMAIN, ) -> MockConfigEntry: """Set up a test provider via config entry.""" @@ -230,7 +165,7 @@ async def mock_config_entry_setup( async def test_get_provider_info( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test engine that doesn't exist.""" client = await hass_client() @@ -252,7 +187,7 @@ async def test_get_provider_info( async def test_non_existing_provider( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test streaming to engine that doesn't exist.""" client = await hass_client() @@ -278,7 +213,7 @@ async def test_non_existing_provider( async def test_stream_audio( hass: HomeAssistant, hass_client: ClientSessionGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> None: """Test streaming audio and getting response.""" client = await hass_client() @@ -339,7 +274,7 @@ async def test_metadata_errors( header: str | None, status: int, error: str, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, ) -> 
None: """Test metadata errors.""" client = await hass_client() @@ -355,7 +290,7 @@ async def test_metadata_errors( async def test_get_provider( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, ) -> None: """Test we can get STT providers.""" await mock_setup(hass, tmp_path, mock_provider) @@ -366,7 +301,7 @@ async def test_get_provider( async def test_config_entry_unload( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity ) -> None: """Test we can unload config entry.""" config_entry = await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) @@ -378,7 +313,7 @@ async def test_config_entry_unload( async def test_restore_state( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockProviderEntity, + mock_provider_entity: MockSTTProviderEntity, ) -> None: """Test we restore state in the integration.""" entity_id = f"{DOMAIN}.{TEST_DOMAIN}" @@ -395,15 +330,19 @@ async def test_restore_state( @pytest.mark.parametrize( - ("setup", "engine_id"), - [("mock_setup", "test"), ("mock_config_entry_setup", "stt.test")], + ("setup", "engine_id", "extra_data"), + [ + ("mock_setup", "test", {"name": "test"}), + ("mock_config_entry_setup", "stt.test", {}), + ], indirect=["setup"], ) async def test_ws_list_engines( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, - setup: MockProvider | MockProviderEntity, + setup: MockSTTProvider | MockSTTProviderEntity, engine_id: str, + extra_data: dict[str, str], ) -> None: """Test listing speech-to-text engines.""" client = await hass_ws_client() @@ -415,6 +354,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de", "de-CH", "en"]} + | extra_data ] } @@ -423,7 +363,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": []}] + "providers": [{"engine_id": engine_id, "supported_languages": []} | extra_data] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "en"}) @@ -431,7 +371,9 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["en"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["en"]} | extra_data + ] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "en-UK"}) @@ -439,7 +381,9 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["en"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["en"]} | extra_data + ] } await client.send_json_auto_id({"type": "stt/engine/list", "language": "de"}) @@ -447,7 +391,10 @@ async def test_ws_list_engines( assert msg["type"] == "result" assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["de", "de-CH"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["de", "de-CH"]} + | extra_data + ] } await client.send_json_auto_id( @@ -457,7 +404,10 @@ async def test_ws_list_engines( assert msg["type"] == "result" assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": ["de-CH", 
"de"]}] + "providers": [ + {"engine_id": engine_id, "supported_languages": ["de-CH", "de"]} + | extra_data + ] } @@ -472,7 +422,7 @@ async def test_default_engine_none(hass: HomeAssistant, tmp_path: Path) -> None: async def test_default_engine( hass: HomeAssistant, tmp_path: Path, - mock_provider: MockProvider, + mock_provider: MockSTTProvider, ) -> None: """Test async_default_engine.""" mock_stt_platform( @@ -488,7 +438,7 @@ async def test_default_engine( async def test_default_engine_entity( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity ) -> None: """Test async_default_engine.""" await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) @@ -496,21 +446,25 @@ async def test_default_engine_entity( assert async_default_engine(hass) == f"{DOMAIN}.{TEST_DOMAIN}" -@pytest.mark.parametrize("config_flow_test_domain", ["new_test"]) -async def test_default_engine_prefer_provider( +@pytest.mark.parametrize("config_flow_test_domains", [("new_test",)]) +async def test_default_engine_prefer_entity( hass: HomeAssistant, tmp_path: Path, - mock_provider_entity: MockProviderEntity, - mock_provider: MockProvider, - config_flow_test_domain: str, + mock_provider_entity: MockSTTProviderEntity, + mock_provider: MockSTTProvider, + config_flow_test_domains: str, ) -> None: - """Test async_default_engine.""" + """Test async_default_engine. + + In this tests there's an entity and a legacy provider. + The test asserts async_default_engine returns the entity. + """ mock_provider_entity.url_path = "stt.new_test" mock_provider_entity._attr_name = "New test" await mock_setup(hass, tmp_path, mock_provider) await mock_config_entry_setup( - hass, tmp_path, mock_provider_entity, test_domain=config_flow_test_domain + hass, tmp_path, mock_provider_entity, test_domain=config_flow_test_domains[0] ) await hass.async_block_till_done() @@ -520,11 +474,53 @@ async def test_default_engine_prefer_provider( provider_engine = async_get_speech_to_text_engine(hass, "test") assert provider_engine is not None assert provider_engine.name == "test" - assert async_default_engine(hass) == "test" + assert async_default_engine(hass) == "stt.new_test" + + +@pytest.mark.parametrize( + "config_flow_test_domains", + [ + # Test different setup order to ensure the default is not influenced + # by setup order. + ("cloud", "new_test"), + ("new_test", "cloud"), + ], +) +async def test_default_engine_prefer_cloud_entity( + hass: HomeAssistant, + tmp_path: Path, + mock_provider: MockSTTProvider, + config_flow_test_domains: str, +) -> None: + """Test async_default_engine. + + In this tests there's an entity from domain cloud, an entity from domain new_test + and a legacy provider. + The test asserts async_default_engine returns the entity from domain cloud. 
+ """ + await mock_setup(hass, tmp_path, mock_provider) + for domain in config_flow_test_domains: + entity = MockSTTProviderEntity() + entity.url_path = f"stt.{domain}" + entity._attr_name = f"{domain} STT entity" + await mock_config_entry_setup(hass, tmp_path, entity, test_domain=domain) + await hass.async_block_till_done() + + for domain in config_flow_test_domains: + entity_engine = async_get_speech_to_text_engine( + hass, f"stt.{domain}_stt_entity" + ) + assert entity_engine is not None + assert entity_engine.name == f"{domain} STT entity" + + provider_engine = async_get_speech_to_text_engine(hass, "test") + assert provider_engine is not None + assert provider_engine.name == "test" + assert async_default_engine(hass) == "stt.cloud_stt_entity" async def test_get_engine_legacy( - hass: HomeAssistant, tmp_path: Path, mock_provider: MockProvider + hass: HomeAssistant, tmp_path: Path, mock_provider: MockSTTProvider ) -> None: """Test async_get_speech_to_text_engine.""" mock_stt_platform( @@ -549,7 +545,7 @@ async def test_get_engine_legacy( async def test_get_engine_entity( - hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockProviderEntity + hass: HomeAssistant, tmp_path: Path, mock_provider_entity: MockSTTProviderEntity ) -> None: """Test async_get_speech_to_text_engine.""" await mock_config_entry_setup(hass, tmp_path, mock_provider_entity) diff --git a/tests/components/stt/test_legacy.py b/tests/components/stt/test_legacy.py index 04068b012f1..20fa86b4d20 100644 --- a/tests/components/stt/test_legacy.py +++ b/tests/components/stt/test_legacy.py @@ -41,7 +41,7 @@ async def test_platform_setup_with_error( discovery_info: DiscoveryInfoType | None = None, ) -> Provider: """Raise exception during platform setup.""" - raise Exception("Setup error") # pylint: disable=broad-exception-raised + raise Exception("Setup error") # noqa: TRY002 mock_stt_platform(hass, tmp_path, "bad_stt", async_get_engine=async_get_engine) diff --git a/tests/components/subaru/conftest.py b/tests/components/subaru/conftest.py index f769eba252c..e18ea8fd398 100644 --- a/tests/components/subaru/conftest.py +++ b/tests/components/subaru/conftest.py @@ -100,7 +100,7 @@ TEST_DEVICE_NAME = "test_vehicle_2" TEST_ENTITY_ID = f"sensor.{TEST_DEVICE_NAME}_odometer" -def advance_time_to_next_fetch(hass): +def advance_time_to_next_fetch(hass: HomeAssistant) -> None: """Fast forward time to next fetch.""" future = dt_util.utcnow() + timedelta(seconds=FETCH_INTERVAL + 30) async_fire_time_changed(hass, future) @@ -181,7 +181,7 @@ async def setup_subaru_config_entry( @pytest.fixture -async def subaru_config_entry(hass): +async def subaru_config_entry(hass: HomeAssistant) -> MockConfigEntry: """Create a Subaru config entry prior to setup.""" await async_setup_component(hass, HA_DOMAIN, {}) config_entry = MockConfigEntry(**TEST_CONFIG_ENTRY) @@ -190,7 +190,9 @@ async def subaru_config_entry(hass): @pytest.fixture -async def ev_entry(hass, subaru_config_entry): +async def ev_entry( + hass: HomeAssistant, subaru_config_entry: MockConfigEntry +) -> MockConfigEntry: """Create a Subaru entry representing an EV vehicle with full STARLINK subscription.""" await setup_subaru_config_entry(hass, subaru_config_entry) assert DOMAIN in hass.config_entries.async_domains() diff --git a/tests/components/subaru/test_config_flow.py b/tests/components/subaru/test_config_flow.py index 9bddeeee051..6abc544c92a 100644 --- a/tests/components/subaru/test_config_flow.py +++ b/tests/components/subaru/test_config_flow.py @@ -10,6 +10,7 @@ from 
subarulink.exceptions import InvalidCredentials, InvalidPIN, SubaruExceptio from homeassistant import config_entries from homeassistant.components.subaru import config_flow from homeassistant.components.subaru.const import CONF_UPDATE_ENABLED, DOMAIN +from homeassistant.config_entries import ConfigFlowResult from homeassistant.const import CONF_DEVICE_ID, CONF_PIN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -389,7 +390,7 @@ async def test_option_flow(hass: HomeAssistant, options_form) -> None: @pytest.fixture -async def user_form(hass): +async def user_form(hass: HomeAssistant) -> ConfigFlowResult: """Return initial form for Subaru config flow.""" return await hass.config_entries.flow.async_init( config_flow.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -397,7 +398,9 @@ async def user_form(hass): @pytest.fixture -async def two_factor_start_form(hass, user_form): +async def two_factor_start_form( + hass: HomeAssistant, user_form: ConfigFlowResult +) -> ConfigFlowResult: """Return two factor form for Subaru config flow.""" with ( patch(MOCK_API_CONNECT, return_value=True), @@ -410,7 +413,9 @@ async def two_factor_start_form(hass, user_form): @pytest.fixture -async def two_factor_verify_form(hass, two_factor_start_form): +async def two_factor_verify_form( + hass: HomeAssistant, two_factor_start_form: ConfigFlowResult +) -> ConfigFlowResult: """Return two factor form for Subaru config flow.""" with ( patch( @@ -427,7 +432,9 @@ async def two_factor_verify_form(hass, two_factor_start_form): @pytest.fixture -async def pin_form(hass, two_factor_verify_form): +async def pin_form( + hass: HomeAssistant, two_factor_verify_form: ConfigFlowResult +) -> ConfigFlowResult: """Return PIN input form for Subaru config flow.""" with ( patch( @@ -443,7 +450,7 @@ async def pin_form(hass, two_factor_verify_form): @pytest.fixture -async def options_form(hass): +async def options_form(hass: HomeAssistant) -> ConfigFlowResult: """Return options form for Subaru config flow.""" entry = MockConfigEntry(domain=DOMAIN, data={}, options=None) entry.add_to_hass(hass) diff --git a/tests/components/sunweg/test_config_flow.py b/tests/components/sunweg/test_config_flow.py index 80b6a946749..8103003d7fb 100644 --- a/tests/components/sunweg/test_config_flow.py +++ b/tests/components/sunweg/test_config_flow.py @@ -69,14 +69,7 @@ async def test_reauth(hass: HomeAssistant, plant_fixture, inverter_fixture) -> N assert entries[0].data[CONF_USERNAME] == SUNWEG_MOCK_ENTRY.data[CONF_USERNAME] assert entries[0].data[CONF_PASSWORD] == SUNWEG_MOCK_ENTRY.data[CONF_PASSWORD] - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/surepetcare/test_config_flow.py b/tests/components/surepetcare/test_config_flow.py index c3c13195aca..c4055ebe658 100644 --- a/tests/components/surepetcare/test_config_flow.py +++ b/tests/components/surepetcare/test_config_flow.py @@ -155,15 +155,7 @@ async def test_reauthentication(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - 
data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -192,15 +184,7 @@ async def test_reauthentication_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -230,15 +214,7 @@ async def test_reauthentication_cannot_connect(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -268,15 +244,7 @@ async def test_reauthentication_unknown_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/swiss_public_transport/__init__.py b/tests/components/swiss_public_transport/__init__.py index 3859a630c31..98262324b11 100644 --- a/tests/components/swiss_public_transport/__init__.py +++ b/tests/components/swiss_public_transport/__init__.py @@ -1 +1,13 @@ """Tests for the swiss_public_transport integration.""" + +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/swiss_public_transport/fixtures/connections.json b/tests/components/swiss_public_transport/fixtures/connections.json new file mode 100644 index 00000000000..4edead56f14 --- /dev/null +++ b/tests/components/swiss_public_transport/fixtures/connections.json @@ -0,0 +1,130 @@ +[ + { + "departure": "2024-01-06T18:03:00+0100", + "number": 0, + "platform": 0, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:04:00+0100", + "number": 1, + "platform": 1, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:05:00+0100", + "number": 2, + "platform": 2, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:06:00+0100", + "number": 3, + "platform": 3, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:07:00+0100", + "number": 4, + "platform": 4, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:08:00+0100", + "number": 5, + "platform": 5, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:09:00+0100", + "number": 6, + "platform": 6, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": 
"2024-01-06T18:10:00+0100", + "number": 7, + "platform": 7, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:11:00+0100", + "number": 8, + "platform": 8, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:12:00+0100", + "number": 9, + "platform": 9, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:13:00+0100", + "number": 10, + "platform": 10, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:14:00+0100", + "number": 11, + "platform": 11, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:15:00+0100", + "number": 12, + "platform": 12, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:16:00+0100", + "number": 13, + "platform": 13, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:17:00+0100", + "number": 14, + "platform": 14, + "transfers": 0, + "duration": "10", + "delay": 0 + }, + { + "departure": "2024-01-06T18:18:00+0100", + "number": 15, + "platform": 15, + "transfers": 0, + "duration": "10", + "delay": 0 + } +] diff --git a/tests/components/swiss_public_transport/test_init.py b/tests/components/swiss_public_transport/test_init.py index 47360f93cf2..7ee8b696499 100644 --- a/tests/components/swiss_public_transport/test_init.py +++ b/tests/components/swiss_public_transport/test_init.py @@ -1,4 +1,4 @@ -"""Test the swiss_public_transport config flow.""" +"""Test the swiss_public_transport integration.""" from unittest.mock import AsyncMock, patch diff --git a/tests/components/swiss_public_transport/test_service.py b/tests/components/swiss_public_transport/test_service.py new file mode 100644 index 00000000000..4009327e77d --- /dev/null +++ b/tests/components/swiss_public_transport/test_service.py @@ -0,0 +1,224 @@ +"""Test the swiss_public_transport service.""" + +import json +import logging +from unittest.mock import AsyncMock, patch + +from opendata_transport.exceptions import ( + OpendataTransportConnectionError, + OpendataTransportError, +) +import pytest +from voluptuous import error as vol_er + +from homeassistant.components.swiss_public_transport.const import ( + ATTR_CONFIG_ENTRY_ID, + ATTR_LIMIT, + CONF_DESTINATION, + CONF_START, + CONNECTIONS_COUNT, + CONNECTIONS_MAX, + DOMAIN, + SERVICE_FETCH_CONNECTIONS, +) +from homeassistant.components.swiss_public_transport.helper import unique_id_from_config +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError + +from . 
import setup_integration + +from tests.common import MockConfigEntry, load_fixture + +_LOGGER = logging.getLogger(__name__) + +MOCK_DATA_STEP_BASE = { + CONF_START: "test_start", + CONF_DESTINATION: "test_destination", +} + + +@pytest.mark.parametrize( + ("data", "config_data"), + [ + ({ATTR_LIMIT: 1}, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: 2}, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: 3}, MOCK_DATA_STEP_BASE), + ({ATTR_LIMIT: CONNECTIONS_MAX}, MOCK_DATA_STEP_BASE), + ({}, MOCK_DATA_STEP_BASE), + ], +) +async def test_service_call_fetch_connections_success( + hass: HomeAssistant, + data: dict, + config_data, +) -> None: + """Test the fetch_connections service.""" + + unique_id = unique_id_from_config(config_data) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data=config_data, + title=f"Service test call with data={data}", + unique_id=unique_id, + entry_id=f"entry_{unique_id}", + ) + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = json.loads(load_fixture("connections.json", DOMAIN))[ + 0 : data.get(ATTR_LIMIT, CONNECTIONS_COUNT) + 2 + ] + + await setup_integration(hass, config_entry) + + data[ATTR_CONFIG_ENTRY_ID] = config_entry.entry_id + assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) + response = await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data=data, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() + assert response["connections"] is not None + assert len(response["connections"]) == data.get(ATTR_LIMIT, CONNECTIONS_COUNT) + + +@pytest.mark.parametrize( + ("limit", "config_data", "expected_result", "raise_error"), + [ + (-1, MOCK_DATA_STEP_BASE, pytest.raises(vol_er.MultipleInvalid), None), + (0, MOCK_DATA_STEP_BASE, pytest.raises(vol_er.MultipleInvalid), None), + ( + CONNECTIONS_MAX + 1, + MOCK_DATA_STEP_BASE, + pytest.raises(vol_er.MultipleInvalid), + None, + ), + ( + 1, + MOCK_DATA_STEP_BASE, + pytest.raises(HomeAssistantError), + OpendataTransportConnectionError(), + ), + ( + 2, + MOCK_DATA_STEP_BASE, + pytest.raises(HomeAssistantError), + OpendataTransportError(), + ), + ], +) +async def test_service_call_fetch_connections_error( + hass: HomeAssistant, + limit, + config_data, + expected_result, + raise_error, +) -> None: + """Test service call with standard error.""" + + unique_id = unique_id_from_config(config_data) + + config_entry = MockConfigEntry( + domain=DOMAIN, + data=config_data, + title=f"Service test call with limit={limit} and error={raise_error}", + unique_id=unique_id, + entry_id=f"entry_{unique_id}", + ) + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = json.loads(load_fixture("connections.json", DOMAIN)) + + await setup_integration(hass, config_entry) + + assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) + mock().async_get_data.side_effect = raise_error + with expected_result: + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, + ATTR_LIMIT: limit, + }, + blocking=True, + return_response=True, + ) + + +async def test_service_call_load_unload( + hass: HomeAssistant, +) -> None: + """Test service call with integration error.""" + + unique_id = unique_id_from_config(MOCK_DATA_STEP_BASE) + + config_entry = MockConfigEntry( + domain=DOMAIN, + 
data=MOCK_DATA_STEP_BASE, + title="Service test call for unloaded entry", + unique_id=unique_id, + entry_id=f"entry_{unique_id}", + ) + + bad_entry_id = "bad_entry_id" + + with patch( + "homeassistant.components.swiss_public_transport.OpendataTransport", + return_value=AsyncMock(), + ) as mock: + mock().connections = json.loads(load_fixture("connections.json", DOMAIN)) + + await setup_integration(hass, config_entry) + + assert hass.services.has_service(DOMAIN, SERVICE_FETCH_CONNECTIONS) + response = await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + await hass.async_block_till_done() + assert response["connections"] is not None + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + with pytest.raises( + ServiceValidationError, match=f"{config_entry.title} is not loaded" + ): + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: config_entry.entry_id, + }, + blocking=True, + return_response=True, + ) + + with pytest.raises( + ServiceValidationError, + match=f'Swiss public transport integration instance "{bad_entry_id}" not found', + ): + await hass.services.async_call( + domain=DOMAIN, + service=SERVICE_FETCH_CONNECTIONS, + service_data={ + ATTR_CONFIG_ENTRY_ID: bad_entry_id, + }, + blocking=True, + return_response=True, + ) diff --git a/tests/components/switch/common.py b/tests/components/switch/common.py index e9764d59d7c..96c79fb7d55 100644 --- a/tests/components/switch/common.py +++ b/tests/components/switch/common.py @@ -15,28 +15,31 @@ from homeassistant.const import ( STATE_OFF, STATE_ON, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def turn_on(hass, entity_id=ENTITY_MATCH_ALL): +def turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified switch on.""" hass.add_job(async_turn_on, hass, entity_id) -async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified switch on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) @bind_hass -def turn_off(hass, entity_id=ENTITY_MATCH_ALL): +def turn_off(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified switch off.""" hass.add_job(async_turn_off, hass, entity_id) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_off( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified switch off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) diff --git a/tests/components/switcher_kis/consts.py b/tests/components/switcher_kis/consts.py index 3c5f3ff241e..ffeef64b5d7 100644 --- a/tests/components/switcher_kis/consts.py +++ b/tests/components/switcher_kis/consts.py @@ -38,6 +38,10 @@ DUMMY_MAC_ADDRESS1 = "A1:B2:C3:45:67:D8" DUMMY_MAC_ADDRESS2 = "A1:B2:C3:45:67:D9" DUMMY_MAC_ADDRESS3 = "A1:B2:C3:45:67:DA" DUMMY_MAC_ADDRESS4 = "A1:B2:C3:45:67:DB" +DUMMY_TOKEN_NEEDED1 = False +DUMMY_TOKEN_NEEDED2 = False +DUMMY_TOKEN_NEEDED3 = False +DUMMY_TOKEN_NEEDED4 = False DUMMY_PHONE_ID = "1234" 
DUMMY_POWER_CONSUMPTION1 = 100 DUMMY_POWER_CONSUMPTION2 = 2780 @@ -60,6 +64,7 @@ DUMMY_PLUG_DEVICE = SwitcherPowerPlug( DUMMY_IP_ADDRESS1, DUMMY_MAC_ADDRESS1, DUMMY_DEVICE_NAME1, + DUMMY_TOKEN_NEEDED1, DUMMY_POWER_CONSUMPTION1, DUMMY_ELECTRIC_CURRENT1, ) @@ -72,6 +77,7 @@ DUMMY_WATER_HEATER_DEVICE = SwitcherWaterHeater( DUMMY_IP_ADDRESS2, DUMMY_MAC_ADDRESS2, DUMMY_DEVICE_NAME2, + DUMMY_TOKEN_NEEDED2, DUMMY_POWER_CONSUMPTION2, DUMMY_ELECTRIC_CURRENT2, DUMMY_REMAINING_TIME, @@ -86,6 +92,7 @@ DUMMY_SHUTTER_DEVICE = SwitcherShutter( DUMMY_IP_ADDRESS4, DUMMY_MAC_ADDRESS4, DUMMY_DEVICE_NAME4, + DUMMY_TOKEN_NEEDED4, DUMMY_POSITION, DUMMY_DIRECTION, ) @@ -98,6 +105,7 @@ DUMMY_THERMOSTAT_DEVICE = SwitcherThermostat( DUMMY_IP_ADDRESS3, DUMMY_MAC_ADDRESS3, DUMMY_DEVICE_NAME3, + DUMMY_TOKEN_NEEDED3, DUMMY_THERMOSTAT_MODE, DUMMY_TEMPERATURE, DUMMY_TARGET_TEMPERATURE, diff --git a/tests/components/switcher_kis/test_cover.py b/tests/components/switcher_kis/test_cover.py index 57e2f98915e..c228da6b556 100644 --- a/tests/components/switcher_kis/test_cover.py +++ b/tests/components/switcher_kis/test_cover.py @@ -105,7 +105,7 @@ async def test_cover( # Test stop with patch( - "homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop" + "homeassistant.components.switcher_kis.cover.SwitcherType2Api.stop_shutter" ) as mock_control_device: await hass.services.async_call( COVER_DOMAIN, diff --git a/tests/components/switcher_kis/test_diagnostics.py b/tests/components/switcher_kis/test_diagnostics.py index c8df4dd0b83..89bcefa5138 100644 --- a/tests/components/switcher_kis/test_diagnostics.py +++ b/tests/components/switcher_kis/test_diagnostics.py @@ -40,7 +40,7 @@ async def test_diagnostics( "__type": "", "repr": ( ")>" + "1, , False)>" ), }, "electric_current": 12.8, @@ -50,6 +50,7 @@ async def test_diagnostics( "name": "Heater FE12", "power_consumption": 2780, "remaining_time": "01:29:32", + "token_needed": False, } ], "entry": { diff --git a/tests/components/synology_dsm/test_config_flow.py b/tests/components/synology_dsm/test_config_flow.py index 1574526a701..e5494b7179f 100644 --- a/tests/components/synology_dsm/test_config_flow.py +++ b/tests/components/synology_dsm/test_config_flow.py @@ -21,12 +21,7 @@ from homeassistant.components.synology_dsm.const import ( DEFAULT_SNAPSHOT_QUALITY, DOMAIN, ) -from homeassistant.config_entries import ( - SOURCE_REAUTH, - SOURCE_SSDP, - SOURCE_USER, - SOURCE_ZEROCONF, -) +from homeassistant.config_entries import SOURCE_SSDP, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import ( CONF_HOST, CONF_MAC, @@ -297,24 +292,7 @@ async def test_reauth(hass: HomeAssistant, service: MagicMock) -> None: ) entry.add_to_hass(hass) - with patch( - "homeassistant.config_entries.ConfigEntries.async_reload", - return_value=True, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - "title_placeholders": {"name": entry.title}, - }, - data={ - CONF_HOST: HOST, - CONF_USERNAME: USERNAME, - CONF_PASSWORD: PASSWORD, - }, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/synology_dsm/test_media_source.py b/tests/components/synology_dsm/test_media_source.py index f7ab26997ba..0c7ab6bc1cc 100644 --- a/tests/components/synology_dsm/test_media_source.py +++ b/tests/components/synology_dsm/test_media_source.py @@ -48,11 +48,15 @@ def 
dsm_with_photos() -> MagicMock: dsm.surveillance_station.update = AsyncMock(return_value=True) dsm.upgrade.update = AsyncMock(return_value=True) - dsm.photos.get_albums = AsyncMock(return_value=[SynoPhotosAlbum(1, "Album 1", 10)]) + dsm.photos.get_albums = AsyncMock( + return_value=[SynoPhotosAlbum(1, "Album 1", 10, "")] + ) dsm.photos.get_items_from_album = AsyncMock( return_value=[ - SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", False), - SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", True), + SynoPhotosItem( + 10, "", "filename.jpg", 12345, "10_1298753", "sm", False, "" + ), + SynoPhotosItem(10, "", "filename.jpg", 12345, "10_1298753", "sm", True, ""), ] ) dsm.photos.get_item_thumbnail_url = AsyncMock( @@ -96,17 +100,22 @@ async def test_resolve_media_bad_identifier( [ ( "ABC012345/10/27643_876876/filename.jpg", - "/synology_dsm/ABC012345/27643_876876/filename.jpg", + "/synology_dsm/ABC012345/27643_876876/filename.jpg/", "image/jpeg", ), ( "ABC012345/12/12631_47189/filename.png", - "/synology_dsm/ABC012345/12631_47189/filename.png", + "/synology_dsm/ABC012345/12631_47189/filename.png/", "image/png", ), ( "ABC012345/12/12631_47189/filename.png_shared", - "/synology_dsm/ABC012345/12631_47189/filename.png_shared", + "/synology_dsm/ABC012345/12631_47189/filename.png_shared/", + "image/png", + ), + ( + "ABC012345/12_dmypass/12631_47189/filename.png", + "/synology_dsm/ABC012345/12631_47189/filename.png/dmypass", "image/png", ), ], @@ -250,7 +259,7 @@ async def test_browse_media_get_albums( assert result.children[0].identifier == "mocked_syno_dsm_entry/0" assert result.children[0].title == "All images" assert isinstance(result.children[1], BrowseMedia) - assert result.children[1].identifier == "mocked_syno_dsm_entry/1" + assert result.children[1].identifier == "mocked_syno_dsm_entry/1_" assert result.children[1].title == "Album 1" @@ -382,7 +391,7 @@ async def test_browse_media_get_items( assert len(result.children) == 2 item = result.children[0] assert isinstance(item, BrowseMedia) - assert item.identifier == "mocked_syno_dsm_entry/1/10_1298753/filename.jpg" + assert item.identifier == "mocked_syno_dsm_entry/1_/10_1298753/filename.jpg" assert item.title == "filename.jpg" assert item.media_class == MediaClass.IMAGE assert item.media_content_type == "image/jpeg" @@ -391,7 +400,7 @@ async def test_browse_media_get_items( assert item.thumbnail == "http://my.thumbnail.url" item = result.children[1] assert isinstance(item, BrowseMedia) - assert item.identifier == "mocked_syno_dsm_entry/1/10_1298753/filename.jpg_shared" + assert item.identifier == "mocked_syno_dsm_entry/1_/10_1298753/filename.jpg_shared" assert item.title == "filename.jpg" assert item.media_class == MediaClass.IMAGE assert item.media_content_type == "image/jpeg" @@ -435,24 +444,24 @@ async def test_media_view( assert await hass.config_entries.async_setup(entry.entry_id) with pytest.raises(web.HTTPNotFound): - await view.get(request, "", "10_1298753/filename") + await view.get(request, "", "10_1298753/filename/") # exception in download_item() dsm_with_photos.photos.download_item = AsyncMock( side_effect=SynologyDSMException("", None) ) with pytest.raises(web.HTTPNotFound): - await view.get(request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg") + await view.get(request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg/") # success dsm_with_photos.photos.download_item = AsyncMock(return_value=b"xxxx") with patch.object(tempfile, "tempdir", tmp_path): result = await view.get( - request, 
"mocked_syno_dsm_entry", "10_1298753/filename.jpg" + request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg/" ) assert isinstance(result, web.Response) with patch.object(tempfile, "tempdir", tmp_path): result = await view.get( - request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg_shared" + request, "mocked_syno_dsm_entry", "10_1298753/filename.jpg_shared/" ) assert isinstance(result, web.Response) diff --git a/tests/components/system_health/test_init.py b/tests/components/system_health/test_init.py index e51ab8fab99..2237edc9647 100644 --- a/tests/components/system_health/test_init.py +++ b/tests/components/system_health/test_init.py @@ -1,5 +1,6 @@ """Tests for the system health component init.""" +from typing import Any from unittest.mock import AsyncMock, Mock, patch from aiohttp.client_exceptions import ClientError @@ -14,7 +15,9 @@ from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import WebSocketGenerator -async def gather_system_health_info(hass, hass_ws_client): +async def gather_system_health_info( + hass: HomeAssistant, hass_ws_client: WebSocketGenerator +) -> dict[str, Any]: """Gather all info.""" client = await hass_ws_client(hass) @@ -72,7 +75,7 @@ async def test_info_endpoint_register_callback( ) -> None: """Test that the info endpoint allows registering callbacks.""" - async def mock_info(hass): + async def mock_info(hass: HomeAssistant) -> dict[str, Any]: return {"storage": "YAML"} async_register_info(hass, "lovelace", mock_info) @@ -92,7 +95,7 @@ async def test_info_endpoint_register_callback_timeout( ) -> None: """Test that the info endpoint timing out.""" - async def mock_info(hass): + async def mock_info(hass: HomeAssistant) -> dict[str, Any]: raise TimeoutError async_register_info(hass, "lovelace", mock_info) @@ -109,8 +112,8 @@ async def test_info_endpoint_register_callback_exc( ) -> None: """Test that the info endpoint requires auth.""" - async def mock_info(hass): - raise Exception("TEST ERROR") # pylint: disable=broad-exception-raised + async def mock_info(hass: HomeAssistant) -> dict[str, Any]: + raise Exception("TEST ERROR") # noqa: TRY002 async_register_info(hass, "lovelace", mock_info) assert await async_setup_component(hass, "system_health", {}) diff --git a/tests/components/system_log/test_init.py b/tests/components/system_log/test_init.py index fb46d120acf..a81a92681f2 100644 --- a/tests/components/system_log/test_init.py +++ b/tests/components/system_log/test_init.py @@ -36,7 +36,7 @@ async def get_error_log(hass_ws_client): def _generate_and_log_exception(exception, log): try: - raise Exception(exception) # pylint: disable=broad-exception-raised + raise Exception(exception) # noqa: TRY002, TRY301 except Exception: _LOGGER.exception(log) @@ -371,7 +371,9 @@ def get_frame(path: str, previous_frame: MagicMock | None) -> MagicMock: ) -async def async_log_error_from_test_path(hass, path, watcher): +async def async_log_error_from_test_path( + hass: HomeAssistant, path: str, watcher: WatchLogErrorHandler +) -> None: """Log error while mocking the path.""" call_path = "internal_path.py" main_frame = get_frame("main_path/main.py", None) @@ -461,7 +463,7 @@ async def test__figure_out_source(hass: HomeAssistant) -> None: in a test because the test is not a component. 
""" try: - raise ValueError("test") + raise ValueError("test") # noqa: TRY301 except ValueError as ex: exc_info = (type(ex), ex, ex.__traceback__) mock_record = MagicMock( @@ -486,7 +488,7 @@ async def test__figure_out_source(hass: HomeAssistant) -> None: async def test_formatting_exception(hass: HomeAssistant) -> None: """Test that exceptions are formatted correctly.""" try: - raise ValueError("test") + raise ValueError("test") # noqa: TRY301 except ValueError as ex: exc_info = (type(ex), ex, ex.__traceback__) mock_record = MagicMock( diff --git a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr index b50e051c816..328065f6098 100644 --- a/tests/components/systemmonitor/snapshots/test_diagnostics.ambr +++ b/tests/components/systemmonitor/snapshots/test_diagnostics.ambr @@ -35,7 +35,7 @@ }), 'disabled_by': None, 'domain': 'systemmonitor', - 'minor_version': 2, + 'minor_version': 3, 'options': dict({ 'binary_sensor': dict({ 'process': list([ diff --git a/tests/components/systemmonitor/snapshots/test_sensor.ambr b/tests/components/systemmonitor/snapshots/test_sensor.ambr index 3fe9ae7e809..1ee9067a528 100644 --- a/tests/components/systemmonitor/snapshots/test_sensor.ambr +++ b/tests/components/systemmonitor/snapshots/test_sensor.ambr @@ -300,24 +300,6 @@ # name: test_sensor[System Monitor Packets out eth1 - state] '150' # --- -# name: test_sensor[System Monitor Process pip - attributes] - ReadOnlyDict({ - 'friendly_name': 'System Monitor Process pip', - 'icon': 'mdi:cpu-64-bit', - }) -# --- -# name: test_sensor[System Monitor Process pip - state] - 'on' -# --- -# name: test_sensor[System Monitor Process python3 - attributes] - ReadOnlyDict({ - 'friendly_name': 'System Monitor Process python3', - 'icon': 'mdi:cpu-64-bit', - }) -# --- -# name: test_sensor[System Monitor Process python3 - state] - 'on' -# --- # name: test_sensor[System Monitor Processor temperature - attributes] ReadOnlyDict({ 'device_class': 'temperature', diff --git a/tests/components/systemmonitor/test_init.py b/tests/components/systemmonitor/test_init.py index 97f4a41b96c..6c1e4e6316c 100644 --- a/tests/components/systemmonitor/test_init.py +++ b/tests/components/systemmonitor/test_init.py @@ -95,9 +95,49 @@ async def test_migrate_process_sensor_to_binary_sensors( await hass.config_entries.async_setup(mock_config_entry.entry_id) await hass.async_block_till_done() - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON process_sensor = hass.states.get("binary_sensor.system_monitor_process_python3") assert process_sensor is not None assert process_sensor.state == STATE_ON + + assert mock_config_entry.minor_version == 3 + assert mock_config_entry.options == { + "binary_sensor": {"process": ["python3", "pip"]}, + "resources": [ + "disk_use_percent_/", + "disk_use_percent_/home/notexist/", + "memory_free_", + "network_out_eth0", + "process_python3", + ], + } + + +async def test_migration_from_future_version( + hass: HomeAssistant, + mock_psutil: Mock, + mock_os: Mock, + freezer: FrozenDateTimeFactory, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test migration from future version.""" + mock_config_entry = MockConfigEntry( + title="System Monitor", + domain=DOMAIN, + version=2, + data={}, + options={ + "sensor": {"process": ["python3", "pip"]}, + "resources": [ + "disk_use_percent_/", + "disk_use_percent_/home/notexist/", + "memory_free_", + 
"network_out_eth0", + "process_python3", + ], + }, + ) + mock_config_entry.add_to_hass(hass) + await hass.config_entries.async_setup(mock_config_entry.entry_id) + await hass.async_block_till_done() + + assert mock_config_entry.state is ConfigEntryState.MIGRATION_ERROR diff --git a/tests/components/systemmonitor/test_repairs.py b/tests/components/systemmonitor/test_repairs.py deleted file mode 100644 index 6c1ff9dfd16..00000000000 --- a/tests/components/systemmonitor/test_repairs.py +++ /dev/null @@ -1,199 +0,0 @@ -"""Test repairs for System Monitor.""" - -from __future__ import annotations - -from http import HTTPStatus -from unittest.mock import Mock - -import pytest -from syrupy.assertion import SnapshotAssertion - -from homeassistant.components.repairs.websocket_api import ( - RepairsFlowIndexView, - RepairsFlowResourceView, -) -from homeassistant.components.systemmonitor.const import DOMAIN -from homeassistant.config_entries import ConfigEntry -from homeassistant.core import HomeAssistant -from homeassistant.data_entry_flow import FlowResultType -from homeassistant.helpers import entity_registry as er, issue_registry as ir -from homeassistant.setup import async_setup_component - -from tests.common import ANY, MockConfigEntry -from tests.typing import ClientSessionGenerator, WebSocketGenerator - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_migrate_process_sensor( - hass: HomeAssistant, - entity_registry: er.EntityRegistry, - mock_psutil: Mock, - mock_os: Mock, - hass_client: ClientSessionGenerator, - hass_ws_client: WebSocketGenerator, - snapshot: SnapshotAssertion, -) -> None: - """Test migrating process sensor to binary sensor.""" - mock_config_entry = MockConfigEntry( - title="System Monitor", - domain=DOMAIN, - data={}, - options={ - "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, - "resources": [ - "disk_use_percent_/", - "disk_use_percent_/home/notexist/", - "memory_free_", - "network_out_eth0", - "process_python3", - ], - }, - ) - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - assert hass.config_entries.async_entries(DOMAIN) == snapshot( - name="before_migration" - ) - - assert await async_setup_component(hass, "repairs", {}) - await hass.async_block_till_done() - - entity = "sensor.system_monitor_process_python3" - state = hass.states.get(entity) - assert state - - assert entity_registry.async_get(entity) - - ws_client = await hass_ws_client(hass) - client = await hass_client() - - await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - assert len(msg["result"]["issues"]) > 0 - issue = None - for i in msg["result"]["issues"]: - if i["issue_id"] == "process_sensor": - issue = i - assert issue is not None - - url = RepairsFlowIndexView.url - resp = await client.post( - url, json={"handler": DOMAIN, "issue_id": "process_sensor"} - ) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data["step_id"] == "migrate_process_sensor" - - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url, json={}) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - # Cannot use identity `is` check here as the value is parsed from JSON - assert data["type"] == FlowResultType.CREATE_ENTRY.value - await hass.async_block_till_done() - - state = 
hass.states.get("binary_sensor.system_monitor_process_python3") - assert state - - await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - issue = None - for i in msg["result"]["issues"]: - if i["issue_id"] == "migrate_process_sensor": - issue = i - assert not issue - - entity = "sensor.system_monitor_process_python3" - state = hass.states.get(entity) - assert not state - - assert not entity_registry.async_get(entity) - - assert hass.config_entries.async_entries(DOMAIN) == snapshot(name="after_migration") - - -@pytest.mark.usefixtures("entity_registry_enabled_by_default") -async def test_other_fixable_issues( - hass: HomeAssistant, - hass_client: ClientSessionGenerator, - hass_ws_client: WebSocketGenerator, - mock_added_config_entry: ConfigEntry, -) -> None: - """Test fixing other issues.""" - assert await async_setup_component(hass, "repairs", {}) - await hass.async_block_till_done() - - ws_client = await hass_ws_client(hass) - client = await hass_client() - - await ws_client.send_json({"id": 1, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - - issue = { - "breaks_in_ha_version": "2022.9.0dev0", - "domain": DOMAIN, - "issue_id": "issue_1", - "is_fixable": True, - "learn_more_url": "", - "severity": "error", - "translation_key": "issue_1", - } - ir.async_create_issue( - hass, - issue["domain"], - issue["issue_id"], - breaks_in_ha_version=issue["breaks_in_ha_version"], - is_fixable=issue["is_fixable"], - is_persistent=False, - learn_more_url=None, - severity=issue["severity"], - translation_key=issue["translation_key"], - ) - - await ws_client.send_json({"id": 2, "type": "repairs/list_issues"}) - msg = await ws_client.receive_json() - - assert msg["success"] - results = msg["result"]["issues"] - assert { - "breaks_in_ha_version": "2022.9.0dev0", - "created": ANY, - "dismissed_version": None, - "domain": DOMAIN, - "is_fixable": True, - "issue_domain": None, - "issue_id": "issue_1", - "learn_more_url": None, - "severity": "error", - "translation_key": "issue_1", - "translation_placeholders": None, - "ignored": False, - } in results - - url = RepairsFlowIndexView.url - resp = await client.post(url, json={"handler": DOMAIN, "issue_id": "issue_1"}) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - flow_id = data["flow_id"] - assert data["step_id"] == "confirm" - - url = RepairsFlowResourceView.url.format(flow_id=flow_id) - resp = await client.post(url) - assert resp.status == HTTPStatus.OK - data = await resp.json() - - # Cannot use identity `is` check here as the value is parsed from JSON - assert data["type"] == FlowResultType.CREATE_ENTRY.value - await hass.async_block_till_done() diff --git a/tests/components/systemmonitor/test_sensor.py b/tests/components/systemmonitor/test_sensor.py index ce15083da67..6d22c5354a4 100644 --- a/tests/components/systemmonitor/test_sensor.py +++ b/tests/components/systemmonitor/test_sensor.py @@ -14,12 +14,10 @@ from homeassistant.components.systemmonitor.const import DOMAIN from homeassistant.components.systemmonitor.coordinator import VirtualMemory from homeassistant.components.systemmonitor.sensor import get_cpu_icon from homeassistant.config_entries import ConfigEntry -from homeassistant.const import STATE_OFF, STATE_ON, STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN from homeassistant.core import HomeAssistant from homeassistant.helpers import 
entity_registry as er -from .conftest import MockProcess - from tests.common import MockConfigEntry, async_fire_time_changed @@ -38,7 +36,6 @@ async def test_sensor( data={}, options={ "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, "resources": [ "disk_use_percent_/", "disk_use_percent_/home/notexist/", @@ -62,10 +59,6 @@ async def test_sensor( "friendly_name": "System Monitor Memory free", } - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON - for entity in er.async_entries_for_config_entry( entity_registry, mock_config_entry.entry_id ): @@ -154,7 +147,6 @@ async def test_sensor_updating( data={}, options={ "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, "resources": [ "disk_use_percent_/", "disk_use_percent_/home/notexist/", @@ -172,10 +164,6 @@ async def test_sensor_updating( assert memory_sensor is not None assert memory_sensor.state == "40.0" - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON - mock_psutil.virtual_memory.side_effect = Exception("Failed to update") freezer.tick(timedelta(minutes=1)) async_fire_time_changed(hass) @@ -202,53 +190,6 @@ async def test_sensor_updating( assert memory_sensor.state == "25.0" -async def test_sensor_process_fails( - hass: HomeAssistant, - mock_psutil: Mock, - mock_os: Mock, - freezer: FrozenDateTimeFactory, - caplog: pytest.LogCaptureFixture, -) -> None: - """Test process not exist failure.""" - mock_config_entry = MockConfigEntry( - title="System Monitor", - domain=DOMAIN, - data={}, - options={ - "binary_sensor": {"process": ["python3", "pip"]}, - "sensor": {"process": ["python3", "pip"]}, - "resources": [ - "disk_use_percent_/", - "disk_use_percent_/home/notexist/", - "memory_free_", - "network_out_eth0", - "process_python3", - ], - }, - ) - mock_config_entry.add_to_hass(hass) - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() - - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_ON - - _process = MockProcess("python3", True) - - mock_psutil.process_iter.return_value = [_process] - - freezer.tick(timedelta(minutes=1)) - async_fire_time_changed(hass) - await hass.async_block_till_done(wait_background_tasks=True) - - process_sensor = hass.states.get("sensor.system_monitor_process_python3") - assert process_sensor is not None - assert process_sensor.state == STATE_OFF - - assert "Failed to load process with ID: 1, old name: python3" in caplog.text - - @pytest.mark.usefixtures("entity_registry_enabled_by_default") async def test_sensor_network_sensors( freezer: FrozenDateTimeFactory, diff --git a/tests/components/tailscale/test_config_flow.py b/tests/components/tailscale/test_config_flow.py index 86daa40d8dc..3a67f46a496 100644 --- a/tests/components/tailscale/test_config_flow.py +++ b/tests/components/tailscale/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, MagicMock from tailscale import TailscaleAuthenticationError, TailscaleConnectionError from homeassistant.components.tailscale.const import CONF_TAILNET, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import 
CONF_API_KEY from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -128,15 +128,7 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -170,15 +162,7 @@ async def test_reauth_with_authentication_error( """ mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -222,15 +206,7 @@ async def test_reauth_api_error( """Test API error during reauthentication.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" diff --git a/tests/components/tailwind/test_config_flow.py b/tests/components/tailwind/test_config_flow.py index f70ab6e27ff..d2d15172718 100644 --- a/tests/components/tailwind/test_config_flow.py +++ b/tests/components/tailwind/test_config_flow.py @@ -14,12 +14,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import zeroconf from homeassistant.components.dhcp import DhcpServiceInfo from homeassistant.components.tailwind.const import DOMAIN -from homeassistant.config_entries import ( - SOURCE_DHCP, - SOURCE_REAUTH, - SOURCE_USER, - SOURCE_ZEROCONF, -) +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER, SOURCE_ZEROCONF from homeassistant.const import CONF_HOST, CONF_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -311,15 +306,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) assert mock_config_entry.data[CONF_TOKEN] == "123456" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_confirm" @@ -354,15 +341,7 @@ async def test_reauth_flow_errors( mock_config_entry.add_to_hass(hass) mock_tailwind.status.side_effect = side_effect - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git 
a/tests/components/tankerkoenig/test_config_flow.py b/tests/components/tankerkoenig/test_config_flow.py index 022b49fd3f8..bb1e943bbb9 100644 --- a/tests/components/tankerkoenig/test_config_flow.py +++ b/tests/components/tankerkoenig/test_config_flow.py @@ -9,7 +9,7 @@ from homeassistant.components.tankerkoenig.const import ( CONF_STATIONS, DOMAIN, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import ( CONF_API_KEY, CONF_LATITUDE, @@ -162,6 +162,10 @@ async def test_user_no_stations(hass: HomeAssistant) -> None: async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: """Test starting a flow by user to re-auth.""" config_entry.add_to_hass(hass) + # re-auth initialized + result = await config_entry.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -171,15 +175,6 @@ async def test_reauth(hass: HomeAssistant, config_entry: MockConfigEntry) -> Non "homeassistant.components.tankerkoenig.config_flow.Tankerkoenig.nearby_stations", ) as mock_nearby_stations, ): - # re-auth initialized - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, - data=config_entry.data, - ) - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - # re-auth unsuccessful mock_nearby_stations.side_effect = TankerkoenigInvalidKeyError("Booom!") result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/tasmota/conftest.py b/tests/components/tasmota/conftest.py index 48cd4012f07..e6bb8c61994 100644 --- a/tests/components/tasmota/conftest.py +++ b/tests/components/tasmota/conftest.py @@ -10,6 +10,7 @@ from homeassistant.components.tasmota.const import ( DEFAULT_PREFIX, DOMAIN, ) +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry from tests.components.light.conftest import mock_light_profiles # noqa: F401 @@ -36,7 +37,7 @@ def disable_status_sensor(status_sensor_disabled): yield -async def setup_tasmota_helper(hass): +async def setup_tasmota_helper(hass: HomeAssistant) -> None: """Set up Tasmota.""" hass.config.components.add("tasmota") @@ -55,6 +56,6 @@ async def setup_tasmota_helper(hass): @pytest.fixture -async def setup_tasmota(hass): +async def setup_tasmota(hass: HomeAssistant) -> None: """Set up Tasmota.""" await setup_tasmota_helper(hass) diff --git a/tests/components/tasmota/test_common.py b/tests/components/tasmota/test_common.py index f3d85f019f3..4d2c821fff4 100644 --- a/tests/components/tasmota/test_common.py +++ b/tests/components/tasmota/test_common.py @@ -2,7 +2,8 @@ import copy import json -from unittest.mock import ANY +from typing import Any +from unittest.mock import ANY, AsyncMock from hatasmota.const import ( CONF_DEEP_SLEEP, @@ -19,6 +20,7 @@ from hatasmota.utils import ( get_topic_tele_state, get_topic_tele_will, ) +import pytest from homeassistant.components.tasmota.const import DEFAULT_PREFIX, DOMAIN from homeassistant.const import STATE_UNAVAILABLE @@ -26,7 +28,7 @@ from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr, entity_registry as er from tests.common import async_fire_mqtt_message -from tests.typing import WebSocketGenerator +from tests.typing import MqttMockHAClient, MqttMockPahoClient, WebSocketGenerator DEFAULT_CONFIG = { "ip": "192.168.15.10", @@ 
-125,14 +127,14 @@ async def remove_device( async def help_test_availability_when_connection_lost( - hass, - mqtt_client_mock, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test availability after MQTT disconnection. This is a test helper for the TasmotaAvailability mixin. @@ -191,14 +193,14 @@ async def help_test_availability_when_connection_lost( async def help_test_deep_sleep_availability_when_connection_lost( - hass, - mqtt_client_mock, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test availability after MQTT disconnection when deep sleep is enabled. This is a test helper for the TasmotaAvailability mixin. @@ -261,13 +263,13 @@ async def help_test_deep_sleep_availability_when_connection_lost( async def help_test_availability( - hass, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test availability. This is a test helper for the TasmotaAvailability mixin. @@ -309,13 +311,13 @@ async def help_test_availability( async def help_test_deep_sleep_availability( - hass, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test availability when deep sleep is enabled. This is a test helper for the TasmotaAvailability mixin. @@ -358,13 +360,13 @@ async def help_test_deep_sleep_availability( async def help_test_availability_discovery_update( - hass, - mqtt_mock, - domain, - config, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test update of discovered TasmotaAvailability. This is a test helper for the TasmotaAvailability mixin. @@ -434,15 +436,15 @@ async def help_test_availability_discovery_update( async def help_test_availability_poll_state( - hass, - mqtt_client_mock, - mqtt_mock, - domain, - config, - poll_topic, - poll_payload, - sensor_config=None, -): + hass: HomeAssistant, + mqtt_client_mock: MqttMockPahoClient, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + poll_topic: str, + poll_payload: str, + sensor_config: dict[str, Any] | None = None, +) -> None: """Test polling of state when device is available. This is a test helper for the TasmotaAvailability mixin. 
@@ -503,17 +505,17 @@ async def help_test_availability_poll_state( async def help_test_discovery_removal( - hass, - mqtt_mock, - caplog, - domain, - config1, - config2, - sensor_config1=None, - sensor_config2=None, - object_id="tasmota_test", - name="Tasmota Test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + caplog: pytest.LogCaptureFixture, + domain: str, + config1: dict[str, Any], + config2: dict[str, Any], + sensor_config1: dict[str, Any] | None = None, + sensor_config2: dict[str, Any] | None = None, + object_id: str = "tasmota_test", + name: str = "Tasmota Test", +) -> None: """Test removal of discovered entity.""" device_reg = dr.async_get(hass) entity_reg = er.async_get(hass) @@ -569,16 +571,16 @@ async def help_test_discovery_removal( async def help_test_discovery_update_unchanged( - hass, - mqtt_mock, - caplog, - domain, - config, - discovery_update, - sensor_config=None, - object_id="tasmota_test", - name="Tasmota Test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + caplog: pytest.LogCaptureFixture, + domain: str, + config: dict[str, Any], + discovery_update: AsyncMock, + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", + name: str = "Tasmota Test", +) -> None: """Test update of discovered component with and without changes. This is a test helper for the MqttDiscoveryUpdate mixin. @@ -623,8 +625,13 @@ async def help_test_discovery_update_unchanged( async def help_test_discovery_device_remove( - hass, mqtt_mock, domain, unique_id, config, sensor_config=None -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + unique_id: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, +) -> None: """Test domain entity is removed when device is removed.""" device_reg = dr.async_get(hass) entity_reg = er.async_get(hass) @@ -659,14 +666,14 @@ async def help_test_discovery_device_remove( async def help_test_entity_id_update_subscriptions( - hass, - mqtt_mock, - domain, - config, - topics=None, - sensor_config=None, - object_id="tasmota_test", -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + topics: list[str] | None = None, + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test MQTT subscriptions are managed when entity_id is updated.""" entity_reg = er.async_get(hass) @@ -711,8 +718,13 @@ async def help_test_entity_id_update_subscriptions( async def help_test_entity_id_update_discovery_update( - hass, mqtt_mock, domain, config, sensor_config=None, object_id="tasmota_test" -): + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + domain: str, + config: dict[str, Any], + sensor_config: dict[str, Any] | None = None, + object_id: str = "tasmota_test", +) -> None: """Test MQTT discovery update after entity_id is updated.""" entity_reg = er.async_get(hass) diff --git a/tests/components/tasmota/test_cover.py b/tests/components/tasmota/test_cover.py index 7da3cdbd1ec..70bf33d0105 100644 --- a/tests/components/tasmota/test_cover.py +++ b/tests/components/tasmota/test_cover.py @@ -2,6 +2,7 @@ import copy import json +from typing import Any from unittest.mock import patch from hatasmota.utils import ( @@ -464,7 +465,9 @@ async def test_controlling_state_via_mqtt_inverted( assert state.attributes["current_position"] == 0 -async def call_service(hass, entity_id, service, **kwargs): +async def call_service( + hass: HomeAssistant, entity_id: str, service: str, **kwargs: Any +) -> None: """Call a fan 
service.""" await hass.services.async_call( cover.DOMAIN, diff --git a/tests/components/tasmota/test_init.py b/tests/components/tasmota/test_init.py index 125dba811e6..2765ed724ea 100644 --- a/tests/components/tasmota/test_init.py +++ b/tests/components/tasmota/test_init.py @@ -5,6 +5,7 @@ import json from unittest.mock import call from homeassistant.components.tasmota.const import DEFAULT_PREFIX, DOMAIN +from homeassistant.config_entries import ConfigEntry from homeassistant.core import HomeAssistant from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -74,7 +75,9 @@ async def test_device_remove_non_tasmota_device( """Test removing a non Tasmota device through device registry.""" assert await async_setup_component(hass, "config", {}) - async def async_remove_config_entry_device(hass, config_entry, device_entry): + async def async_remove_config_entry_device( + hass: HomeAssistant, config_entry: ConfigEntry, device_entry: dr.DeviceEntry + ) -> bool: return True mock_integration( diff --git a/tests/components/tasmota/test_light.py b/tests/components/tasmota/test_light.py index c4c3f0ec8dc..f5802c509bf 100644 --- a/tests/components/tasmota/test_light.py +++ b/tests/components/tasmota/test_light.py @@ -2,6 +2,7 @@ import copy import json +from typing import Any from unittest.mock import patch from hatasmota.const import CONF_MAC @@ -1478,7 +1479,13 @@ async def test_relay_as_light( assert state is not None -async def _test_split_light(hass, mqtt_mock, config, num_lights, num_switches): +async def _test_split_light( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + config: dict[str, Any], + num_lights: int, + num_switches: int, +) -> None: """Test multi-channel light split to single-channel dimmers.""" mac = config["mac"] @@ -1553,7 +1560,12 @@ async def test_split_light2( await _test_split_light(hass, mqtt_mock, config, 5, 2) -async def _test_unlinked_light(hass, mqtt_mock, config, num_switches): +async def _test_unlinked_light( + hass: HomeAssistant, + mqtt_mock: MqttMockHAClient, + config: dict[str, Any], + num_switches: int, +) -> None: """Test rgbww light split to rgb+ww.""" mac = config["mac"] num_lights = 2 diff --git a/tests/components/tautulli/test_config_flow.py b/tests/components/tautulli/test_config_flow.py index ca563cfad77..722fd0a7616 100644 --- a/tests/components/tautulli/test_config_flow.py +++ b/tests/components/tautulli/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch from pytautulli import exceptions from homeassistant.components.tautulli.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_SOURCE, CONF_URL, CONF_VERIFY_SSL from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -156,15 +156,7 @@ async def test_flow_reauth( """Test reauth flow.""" with patch("homeassistant.components.tautulli.PLATFORMS", []): entry = await setup_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - CONF_SOURCE: SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=CONF_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} @@ -193,14 +185,7 @@ async def test_flow_reauth_error( """Test reauth flow 
with invalid authentication.""" with patch("homeassistant.components.tautulli.PLATFORMS", []): entry = await setup_integration(hass, aioclient_mock) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - ) + result = await entry.start_reauth_flow(hass) with patch_config_flow_tautulli(AsyncMock()) as tautullimock: tautullimock.side_effect = exceptions.PyTautulliAuthenticationException result = await hass.config_entries.flow.async_configure( diff --git a/tests/components/technove/fixtures/station_charging.json b/tests/components/technove/fixtures/station_charging.json index 63e68d0db0e..4f50bf1a645 100644 --- a/tests/components/technove/fixtures/station_charging.json +++ b/tests/components/technove/fixtures/station_charging.json @@ -6,7 +6,7 @@ "current": 23.75, "network_ssid": "Connecting...", "id": "AA:AA:AA:AA:AA:BB", - "auto_charge": true, + "auto_charge": false, "highChargePeriodActive": false, "normalPeriodActive": false, "maxChargePourcentage": 0.9, diff --git a/tests/components/technove/snapshots/test_diagnostics.ambr b/tests/components/technove/snapshots/test_diagnostics.ambr index 2e81f124ba5..175e8f2022a 100644 --- a/tests/components/technove/snapshots/test_diagnostics.ambr +++ b/tests/components/technove/snapshots/test_diagnostics.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: test_diagnostics dict({ - 'auto_charge': True, + 'auto_charge': False, 'conflict_in_sharing_config': False, 'current': 23.75, 'energy_session': 12.34, diff --git a/tests/components/technove/snapshots/test_switch.ambr b/tests/components/technove/snapshots/test_switch.ambr index 1a707971fc8..6febc8c768c 100644 --- a/tests/components/technove/snapshots/test_switch.ambr +++ b/tests/components/technove/snapshots/test_switch.ambr @@ -42,6 +42,52 @@ 'last_changed': , 'last_reported': , 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_switches[switch.technove_station_charging_enabled-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.technove_station_charging_enabled', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Charging Enabled', + 'platform': 'technove', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'session_active', + 'unique_id': 'AA:AA:AA:AA:AA:BB_session_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_switches[switch.technove_station_charging_enabled-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'TechnoVE Station Charging Enabled', + }), + 'context': , + 'entity_id': 'switch.technove_station_charging_enabled', + 'last_changed': , + 'last_reported': , + 'last_updated': , 'state': 'on', }) # --- diff --git a/tests/components/technove/test_binary_sensor.py b/tests/components/technove/test_binary_sensor.py index 0ee4f3f3db7..0a90093779e 100644 --- a/tests/components/technove/test_binary_sensor.py +++ b/tests/components/technove/test_binary_sensor.py @@ -8,7 +8,7 @@ import pytest from syrupy import SnapshotAssertion from technove import TechnoVEError -from homeassistant.const import STATE_ON, STATE_UNAVAILABLE, Platform +from 
homeassistant.const import STATE_OFF, STATE_UNAVAILABLE, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -43,7 +43,10 @@ async def test_sensors( @pytest.mark.parametrize( "entity_id", - ["binary_sensor.technove_station_static_ip"], + [ + "binary_sensor.technove_station_static_ip", + "binary_sensor.technove_station_charging", + ], ) @pytest.mark.usefixtures("init_integration") async def test_disabled_by_default_binary_sensors( @@ -64,9 +67,9 @@ async def test_binary_sensor_update_failure( freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator update failure.""" - entity_id = "binary_sensor.technove_station_charging" + entity_id = "binary_sensor.technove_station_power_sharing_mode" - assert hass.states.get(entity_id).state == STATE_ON + assert hass.states.get(entity_id).state == STATE_OFF mock_technove.update.side_effect = TechnoVEError("Test error") freezer.tick(timedelta(minutes=5, seconds=1)) diff --git a/tests/components/technove/test_switch.py b/tests/components/technove/test_switch.py index b1a66607f66..dc0293b6443 100644 --- a/tests/components/technove/test_switch.py +++ b/tests/components/technove/test_switch.py @@ -15,7 +15,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from . import setup_with_selected_platforms @@ -53,6 +53,12 @@ async def test_switches( {"enabled": True}, {"enabled": False}, ), + ( + "switch.technove_station_charging_enabled", + "set_charging_enabled", + {"enabled": True}, + {"enabled": False}, + ), ], ) @pytest.mark.usefixtures("init_integration") @@ -96,6 +102,10 @@ async def test_switch_on_off( "switch.technove_station_auto_charge", "set_auto_charge", ), + ( + "switch.technove_station_charging_enabled", + "set_charging_enabled", + ), ], ) @pytest.mark.usefixtures("init_integration") @@ -130,6 +140,10 @@ async def test_invalid_response( "switch.technove_station_auto_charge", "set_auto_charge", ), + ( + "switch.technove_station_charging_enabled", + "set_charging_enabled", + ), ], ) @pytest.mark.usefixtures("init_integration") @@ -157,3 +171,31 @@ async def test_connection_error( assert method_mock.call_count == 1 assert (state := hass.states.get(state.entity_id)) assert state.state == STATE_UNAVAILABLE + + +@pytest.mark.usefixtures("init_integration") +async def test_disable_charging_auto_charge( + hass: HomeAssistant, + mock_technove: MagicMock, +) -> None: + """Test failure to disable charging when the station is in auto charge mode.""" + entity_id = "switch.technove_station_charging_enabled" + state = hass.states.get(entity_id) + + # Enable auto-charge mode + device = mock_technove.update.return_value + device.info.auto_charge = True + + with pytest.raises( + ServiceValidationError, + match="auto-charge is enabled", + ): + await hass.services.async_call( + SWITCH_DOMAIN, + SERVICE_TURN_OFF, + {ATTR_ENTITY_ID: entity_id}, + blocking=True, + ) + + assert (state := hass.states.get(state.entity_id)) + assert state.state != STATE_UNAVAILABLE diff --git a/tests/components/tedee/test_config_flow.py b/tests/components/tedee/test_config_flow.py index d5dc5d4efcf..0fa3d62c26e 100644 --- a/tests/components/tedee/test_config_flow.py +++ b/tests/components/tedee/test_config_flow.py @@ -10,7 +10,7 @@ from pytedee_async import ( import pytest from 
homeassistant.components.tedee.const import CONF_LOCAL_ACCESS_TOKEN, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_RECONFIGURE, SOURCE_USER +from homeassistant.config_entries import SOURCE_RECONFIGURE, SOURCE_USER from homeassistant.const import CONF_HOST, CONF_WEBHOOK_ID from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -122,18 +122,7 @@ async def test_reauth_flow( mock_config_entry.add_to_hass(hass) - reauth_result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data={ - CONF_LOCAL_ACCESS_TOKEN: LOCAL_ACCESS_TOKEN, - CONF_HOST: "192.168.1.42", - }, - ) + reauth_result = await mock_config_entry.start_reauth_flow(hass) result = await hass.config_entries.flow.async_configure( reauth_result["flow_id"], diff --git a/tests/components/telegram_bot/conftest.py b/tests/components/telegram_bot/conftest.py index 6ea5d1446dd..1afe70dcb8a 100644 --- a/tests/components/telegram_bot/conftest.py +++ b/tests/components/telegram_bot/conftest.py @@ -1,6 +1,8 @@ """Tests for the telegram_bot integration.""" +from collections.abc import AsyncGenerator, Generator from datetime import datetime +from typing import Any from unittest.mock import patch import pytest @@ -18,11 +20,12 @@ from homeassistant.const import ( CONF_URL, EVENT_HOMEASSISTANT_START, ) +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component @pytest.fixture -def config_webhooks(): +def config_webhooks() -> dict[str, Any]: """Fixture for a webhooks platform configuration.""" return { DOMAIN: [ @@ -43,7 +46,7 @@ def config_webhooks(): @pytest.fixture -def config_polling(): +def config_polling() -> dict[str, Any]: """Fixture for a polling platform configuration.""" return { DOMAIN: [ @@ -62,7 +65,7 @@ def config_polling(): @pytest.fixture -def mock_register_webhook(): +def mock_register_webhook() -> Generator[None]: """Mock calls made by telegram_bot when (de)registering webhook.""" with ( patch( @@ -78,7 +81,7 @@ def mock_register_webhook(): @pytest.fixture -def mock_external_calls(): +def mock_external_calls() -> Generator[None]: """Mock calls that make calls to the live Telegram API.""" test_user = User(123456, "Testbot", True) message = Message( @@ -109,7 +112,7 @@ def mock_external_calls(): @pytest.fixture -def mock_generate_secret_token(): +def mock_generate_secret_token() -> Generator[str]: """Mock secret token generated for webhook.""" mock_secret_token = "DEADBEEF12345678DEADBEEF87654321" with patch( @@ -217,12 +220,12 @@ def update_callback_query(): @pytest.fixture async def webhook_platform( - hass, - config_webhooks, - mock_register_webhook, - mock_external_calls, - mock_generate_secret_token, -): + hass: HomeAssistant, + config_webhooks: dict[str, Any], + mock_register_webhook: None, + mock_external_calls: None, + mock_generate_secret_token: str, +) -> AsyncGenerator[None]: """Fixture for setting up the webhooks platform using appropriate config and mocks.""" await async_setup_component( hass, @@ -235,7 +238,9 @@ async def webhook_platform( @pytest.fixture -async def polling_platform(hass, config_polling, mock_external_calls): +async def polling_platform( + hass: HomeAssistant, config_polling: dict[str, Any], mock_external_calls: None +) -> None: """Fixture for setting up the polling platform using appropriate config and mocks.""" await async_setup_component( hass, diff --git 
a/tests/components/telegram_bot/test_telegram_bot.py b/tests/components/telegram_bot/test_telegram_bot.py index aad758827ca..bdf6ba72fcc 100644 --- a/tests/components/telegram_bot/test_telegram_bot.py +++ b/tests/components/telegram_bot/test_telegram_bot.py @@ -1,8 +1,11 @@ """Tests for the telegram_bot component.""" -from unittest.mock import AsyncMock, patch +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch +import pytest from telegram import Update +from telegram.error import NetworkError, RetryAfter, TelegramError, TimedOut from homeassistant.components.telegram_bot import ( ATTR_MESSAGE, @@ -11,6 +14,7 @@ from homeassistant.components.telegram_bot import ( SERVICE_SEND_MESSAGE, ) from homeassistant.components.telegram_bot.webhooks import TELEGRAM_WEBHOOK_URL +from homeassistant.const import EVENT_HOMEASSISTANT_START from homeassistant.core import Context, HomeAssistant from homeassistant.setup import async_setup_component @@ -188,6 +192,103 @@ async def test_polling_platform_message_text_update( assert isinstance(events[0].context, Context) +@pytest.mark.parametrize( + ("error", "log_message"), + [ + ( + TelegramError("Telegram error"), + 'caused error: "Telegram error"', + ), + (NetworkError("Network error"), ""), + (RetryAfter(42), ""), + (TimedOut("TimedOut error"), ""), + ], +) +async def test_polling_platform_add_error_handler( + hass: HomeAssistant, + config_polling: dict[str, Any], + update_message_text: dict[str, Any], + caplog: pytest.LogCaptureFixture, + error: Exception, + log_message: str, +) -> None: + """Test polling add error handler.""" + with patch( + "homeassistant.components.telegram_bot.polling.ApplicationBuilder" + ) as application_builder_class: + await async_setup_component( + hass, + DOMAIN, + config_polling, + ) + await hass.async_block_till_done() + + application = ( + application_builder_class.return_value.bot.return_value.build.return_value + ) + application.updater.stop = AsyncMock() + application.stop = AsyncMock() + application.shutdown = AsyncMock() + process_error = application.add_error_handler.call_args[0][0] + application.bot.defaults.tzinfo = None + update = Update.de_json(update_message_text, application.bot) + + await process_error(update, MagicMock(error=error)) + + assert log_message in caplog.text + + +@pytest.mark.parametrize( + ("error", "log_message"), + [ + ( + TelegramError("Telegram error"), + "TelegramError: Telegram error", + ), + (NetworkError("Network error"), ""), + (RetryAfter(42), ""), + (TimedOut("TimedOut error"), ""), + ], +) +async def test_polling_platform_start_polling_error_callback( + hass: HomeAssistant, + config_polling: dict[str, Any], + caplog: pytest.LogCaptureFixture, + error: Exception, + log_message: str, +) -> None: + """Test the error callback passed to start_polling.""" + with patch( + "homeassistant.components.telegram_bot.polling.ApplicationBuilder" + ) as application_builder_class: + await async_setup_component( + hass, + DOMAIN, + config_polling, + ) + await hass.async_block_till_done() + + application = ( + application_builder_class.return_value.bot.return_value.build.return_value + ) + application.initialize = AsyncMock() + application.updater.start_polling = AsyncMock() + application.start = AsyncMock() + application.updater.stop = AsyncMock() + application.stop = AsyncMock() + application.shutdown = AsyncMock() + + hass.bus.async_fire(EVENT_HOMEASSISTANT_START) + await hass.async_block_till_done() + error_callback = application.updater.start_polling.call_args.kwargs[ + "error_callback" + ] + 
+ error_callback(error) + + assert log_message in caplog.text + + async def test_webhook_endpoint_unauthorized_update_doesnt_generate_telegram_text_event( hass: HomeAssistant, webhook_platform, diff --git a/tests/components/tellduslive/test_config_flow.py b/tests/components/tellduslive/test_config_flow.py index c575e7fb5c1..abce2858bf3 100644 --- a/tests/components/tellduslive/test_config_flow.py +++ b/tests/components/tellduslive/test_config_flow.py @@ -20,7 +20,9 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry -def init_config_flow(hass, side_effect=None): +def init_config_flow( + hass: HomeAssistant, side_effect: type[Exception] | None = None +) -> config_flow.FlowHandler: """Init a configuration flow.""" flow = config_flow.FlowHandler() flow.hass = hass diff --git a/tests/components/template/snapshots/test_number.ambr b/tests/components/template/snapshots/test_number.ambr new file mode 100644 index 00000000000..d6f5b1e338d --- /dev/null +++ b/tests/components/template/snapshots/test_number.ambr @@ -0,0 +1,18 @@ +# serializer version: 1 +# name: test_setup_config_entry + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'My template', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 0.1, + }), + 'context': , + 'entity_id': 'number.my_template', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10.0', + }) +# --- \ No newline at end of file diff --git a/tests/components/template/snapshots/test_select.ambr b/tests/components/template/snapshots/test_select.ambr index d4cabb2900f..e2142394cba 100644 --- a/tests/components/template/snapshots/test_select.ambr +++ b/tests/components/template/snapshots/test_select.ambr @@ -16,4 +16,4 @@ 'last_updated': , 'state': 'on', }) -# --- \ No newline at end of file +# --- diff --git a/tests/components/template/snapshots/test_weather.ambr b/tests/components/template/snapshots/test_weather.ambr index 9b0cf2b9471..bdda5b44e94 100644 --- a/tests/components/template/snapshots/test_weather.ambr +++ b/tests/components/template/snapshots/test_weather.ambr @@ -1,87 +1,4 @@ # serializer version: 1 -# name: test_forecasts[config0-1-weather-forecast] - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-forecast].1 - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-forecast].2 - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'fog', - 'datetime': '2023-02-17T14:00:00+00:00', - 'is_daytime': True, - 'temperature': 14.2, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-forecast].3 - dict({ - 'weather.forecast': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 16.9, - }), - ]), - }), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast] - dict({ - 'forecast': list([ - ]), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast].1 - dict({ - 'forecast': list([ - ]), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'fog', - 'datetime': '2023-02-17T14:00:00+00:00', - 'is_daytime': True, - 'temperature': 
14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather-get_forecast].3 - dict({ - 'forecast': list([ - ]), - }) -# --- # name: test_forecasts[config0-1-weather-get_forecasts] dict({ 'weather.forecast': dict({ @@ -120,51 +37,6 @@ }), }) # --- -# name: test_forecasts[config0-1-weather] - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'fog', - 'datetime': '2023-02-17T14:00:00+00:00', - 'is_daytime': True, - 'temperature': 14.2, - }), - ]), - }) -# --- -# name: test_forecasts[config0-1-weather].3 - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-02-17T14:00:00+00:00', - 'temperature': 16.9, - }), - ]), - }) -# --- # name: test_restore_weather_save_state dict({ 'last_apparent_temperature': None, @@ -180,92 +52,6 @@ 'last_wind_speed': None, }) # --- -# name: test_trigger_weather_services[config0-1-template-forecast] - dict({ - 'weather.test': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-forecast].1 - dict({ - 'weather.test': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-forecast].2 - dict({ - 'weather.test': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'is_daytime': True, - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template-get_forecast].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'is_daytime': True, - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- # name: test_trigger_weather_services[config0-1-template-get_forecasts] dict({ 'weather.test': dict({ @@ -312,43 +98,3 @@ }), }) # --- -# name: test_trigger_weather_services[config0-1-template] - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'precipitation': 20.0, - 'temperature': 
20.0, - 'templow': 15.0, - }), - ]), - }) -# --- -# name: test_trigger_weather_services[config0-1-template].2 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2023-10-19T06:50:05-07:00', - 'is_daytime': True, - 'precipitation': 20.0, - 'temperature': 20.0, - 'templow': 15.0, - }), - ]), - }) -# --- diff --git a/tests/components/template/test_alarm_control_panel.py b/tests/components/template/test_alarm_control_panel.py index 6a2a95a64eb..ea63d7b9926 100644 --- a/tests/components/template/test_alarm_control_panel.py +++ b/tests/components/template/test_alarm_control_panel.py @@ -244,7 +244,7 @@ async def test_template_syntax_error( "platform": "template", "panels": { "test_template_panel": { - "name": "Template Alarm Panel", + "name": '{{ "Template Alarm Panel" }}', "value_template": "disarmed", **OPTIMISTIC_TEMPLATE_ALARM_CONFIG, } diff --git a/tests/components/template/test_button.py b/tests/components/template/test_button.py index 72c3d2351f5..b201385240c 100644 --- a/tests/components/template/test_button.py +++ b/tests/components/template/test_button.py @@ -1,6 +1,7 @@ """The tests for the Template button platform.""" import datetime as dt +from typing import Any from freezegun.api import FrozenDateTimeFactory import pytest @@ -232,11 +233,11 @@ async def test_unique_id(hass: HomeAssistant) -> None: def _verify( - hass, - expected_value, - attributes=None, - entity_id=_TEST_BUTTON, -): + hass: HomeAssistant, + expected_value: str, + attributes: dict[str, Any] | None = None, + entity_id: str = _TEST_BUTTON, +) -> None: """Verify button's state.""" attributes = attributes or {} if CONF_FRIENDLY_NAME not in attributes: diff --git a/tests/components/template/test_config_flow.py b/tests/components/template/test_config_flow.py index ff5db52d667..f8ab190e664 100644 --- a/tests/components/template/test_config_flow.py +++ b/tests/components/template/test_config_flow.py @@ -91,6 +91,34 @@ from tests.typing import WebSocketGenerator {"verify_ssl": True}, {}, ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + "30.0", + {"one": "30.0", "two": "20.0"}, + {}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + }, + {}, + ), ( "select", {"state": "{{ states('select.one') }}"}, @@ -226,6 +254,20 @@ async def test_config_flow( {"verify_ssl": True}, {"verify_ssl": True}, ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + ), ( "select", {"state": "{{ states('select.one') }}"}, @@ -402,6 +444,34 @@ def get_suggested(schema, key): }, "url", ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + {"state": "{{ states('number.two') }}"}, + ["30.0", "20.0"], + {"one": "30.0", "two": "20.0"}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + 
"target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + }, + "state", + ), ( "select", {"state": "{{ states('select.one') }}"}, @@ -1156,6 +1226,20 @@ async def test_option_flow_sensor_preview_config_entry_removed( {}, {}, ), + ( + "number", + {"state": "{{ states('number.one') }}"}, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + { + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + }, + ), ( "select", {"state": "{{ states('select.one') }}"}, diff --git a/tests/components/template/test_fan.py b/tests/components/template/test_fan.py index 82ad4ede91c..40966d5557c 100644 --- a/tests/components/template/test_fan.py +++ b/tests/components/template/test_fan.py @@ -699,13 +699,13 @@ async def test_set_invalid_osc(hass: HomeAssistant, calls: list[ServiceCall]) -> def _verify( - hass, - expected_state, - expected_percentage, - expected_oscillating, - expected_direction, - expected_preset_mode, -): + hass: HomeAssistant, + expected_state: str, + expected_percentage: int | None, + expected_oscillating: bool | None, + expected_direction: str | None, + expected_preset_mode: str | None, +) -> None: """Verify fan's state, speed and osc.""" state = hass.states.get(_TEST_FAN) attributes = state.attributes @@ -716,7 +716,7 @@ def _verify( assert attributes.get(ATTR_PRESET_MODE) == expected_preset_mode -async def _register_fan_sources(hass): +async def _register_fan_sources(hass: HomeAssistant) -> None: with assert_setup_component(1, "input_boolean"): assert await setup.async_setup_component( hass, "input_boolean", {"input_boolean": {"state": None}} @@ -760,8 +760,11 @@ async def _register_fan_sources(hass): async def _register_components( - hass, speed_list=None, preset_modes=None, speed_count=None -): + hass: HomeAssistant, + speed_list: list[str] | None = None, + preset_modes: list[str] | None = None, + speed_count: int | None = None, +) -> None: """Register basic components for testing.""" await _register_fan_sources(hass) diff --git a/tests/components/template/test_init.py b/tests/components/template/test_init.py index fe08e1f4963..3b4db4bf668 100644 --- a/tests/components/template/test_init.py +++ b/tests/components/template/test_init.py @@ -258,7 +258,7 @@ async def test_reload_sensors_that_reference_other_template_sensors( assert hass.states.get("sensor.test3").state == "2" -async def async_yaml_patch_helper(hass, filename): +async def async_yaml_patch_helper(hass: HomeAssistant, filename: str) -> None: """Help update configuration.yaml.""" yaml_path = get_fixture_path(filename, "template") with patch.object(config, "YAML_CONFIG_FILE", yaml_path): @@ -314,6 +314,32 @@ async def async_yaml_patch_helper(hass, filename): }, {}, ), + ( + { + "template_type": "number", + "name": "My template", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + }, + { + "state": "{{ 11 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + }, + ), ( { "template_type": "select", diff --git a/tests/components/template/test_light.py b/tests/components/template/test_light.py index ad97146d0fb..065a1488dc9 100644 --- a/tests/components/template/test_light.py +++ b/tests/components/template/test_light.py @@ -1,5 
+1,7 @@ """The tests for the Template light platform.""" +from typing import Any + import pytest from homeassistant.components import light @@ -152,7 +154,9 @@ OPTIMISTIC_RGBWW_COLOR_LIGHT_CONFIG = { } -async def async_setup_light(hass, count, light_config): +async def async_setup_light( + hass: HomeAssistant, count: int, light_config: dict[str, Any] +) -> None: """Do setup of light integration.""" config = {"light": {"platform": "template", "lights": light_config}} @@ -169,7 +173,9 @@ async def async_setup_light(hass, count, light_config): @pytest.fixture -async def setup_light(hass, count, light_config): +async def setup_light( + hass: HomeAssistant, count: int, light_config: dict[str, Any] +) -> None: """Do setup of light integration.""" await async_setup_light(hass, count, light_config) diff --git a/tests/components/template/test_number.py b/tests/components/template/test_number.py index bf04151fd36..fdca94d9fa4 100644 --- a/tests/components/template/test_number.py +++ b/tests/components/template/test_number.py @@ -1,5 +1,7 @@ """The tests for the Template number platform.""" +from syrupy.assertion import SnapshotAssertion + from homeassistant import setup from homeassistant.components.input_number import ( ATTR_VALUE as INPUT_NUMBER_ATTR_VALUE, @@ -14,11 +16,12 @@ from homeassistant.components.number import ( DOMAIN as NUMBER_DOMAIN, SERVICE_SET_VALUE as NUMBER_SERVICE_SET_VALUE, ) +from homeassistant.components.template import DOMAIN from homeassistant.const import ATTR_ICON, CONF_ENTITY_ID, STATE_UNKNOWN from homeassistant.core import Context, HomeAssistant, ServiceCall -from homeassistant.helpers import entity_registry as er +from homeassistant.helpers import device_registry as dr, entity_registry as er -from tests.common import assert_setup_component, async_capture_events +from tests.common import MockConfigEntry, assert_setup_component, async_capture_events _TEST_NUMBER = "number.template_number" # Represent for number's value @@ -42,6 +45,40 @@ _VALUE_INPUT_NUMBER_CONFIG = { } +async def test_setup_config_entry( + hass: HomeAssistant, + snapshot: SnapshotAssertion, +) -> None: + """Test the config flow.""" + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "number", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + state = hass.states.get("number.my_template") + assert state is not None + assert state == snapshot + + async def test_missing_optional_config(hass: HomeAssistant) -> None: """Test: missing optional template is ok.""" with assert_setup_component(1, "template"): @@ -332,12 +369,12 @@ async def test_trigger_number(hass: HomeAssistant) -> None: def _verify( - hass, - expected_value, - expected_step, - expected_minimum, - expected_maximum, -): + hass: HomeAssistant, + expected_value: int, + expected_step: int, + expected_minimum: int, + expected_maximum: int, +) -> None: """Verify number's state.""" state = hass.states.get(_TEST_NUMBER) attributes = state.attributes @@ -460,3 +497,50 @@ async def test_icon_template_with_trigger(hass: HomeAssistant) -> None: state = hass.states.get(_TEST_NUMBER) assert float(state.state) == 
51 assert state.attributes[ATTR_ICON] == "mdi:greater" + + +async def test_device_id( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test for device for number template.""" + + device_config_entry = MockConfigEntry() + device_config_entry.add_to_hass(hass) + device_entry = device_registry.async_get_or_create( + config_entry_id=device_config_entry.entry_id, + identifiers={("test", "identifier_test")}, + connections={("mac", "30:31:32:33:34:35")}, + ) + await hass.async_block_till_done() + assert device_entry is not None + assert device_entry.id is not None + + template_config_entry = MockConfigEntry( + data={}, + domain=DOMAIN, + options={ + "name": "My template", + "template_type": "number", + "state": "{{ 10 }}", + "min": "{{ 0 }}", + "max": "{{ 100 }}", + "step": "{{ 0.1 }}", + "set_value": { + "action": "input_number.set_value", + "target": {"entity_id": "input_number.test"}, + "data": {"value": "{{ value }}"}, + }, + "device_id": device_entry.id, + }, + title="My template", + ) + template_config_entry.add_to_hass(hass) + + assert await hass.config_entries.async_setup(template_config_entry.entry_id) + await hass.async_block_till_done() + + template_entity = entity_registry.async_get("number.my_template") + assert template_entity is not None + assert template_entity.device_id == device_entry.id diff --git a/tests/components/template/test_select.py b/tests/components/template/test_select.py index 2268c0840aa..5b4723a3034 100644 --- a/tests/components/template/test_select.py +++ b/tests/components/template/test_select.py @@ -318,7 +318,12 @@ async def test_trigger_select(hass: HomeAssistant) -> None: assert events[0].event_type == "test_number_event" -def _verify(hass, expected_current_option, expected_options, entity_name=_TEST_SELECT): +def _verify( + hass: HomeAssistant, + expected_current_option: str, + expected_options: list[str], + entity_name: str = _TEST_SELECT, +) -> None: """Verify select's state.""" state = hass.states.get(entity_name) attributes = state.attributes diff --git a/tests/components/template/test_sensor.py b/tests/components/template/test_sensor.py index 37d6d120491..fb352ebcb8c 100644 --- a/tests/components/template/test_sensor.py +++ b/tests/components/template/test_sensor.py @@ -23,7 +23,9 @@ from homeassistant.const import ( from homeassistant.core import Context, CoreState, HomeAssistant, State, callback from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.template import Template +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import ATTR_COMPONENT, async_setup_component import homeassistant.util.dt as dt_util @@ -374,7 +376,7 @@ async def test_creating_sensor_loads_group(hass: HomeAssistant) -> None: order = [] after_dep_event = Event() - async def async_setup_group(hass, config): + async def async_setup_group(hass: HomeAssistant, config: ConfigType) -> bool: # Make sure group takes longer to load, so that it won't # be loaded first by chance await after_dep_event.wait() @@ -383,8 +385,11 @@ async def test_creating_sensor_loads_group(hass: HomeAssistant) -> None: return True async def async_setup_template( - hass, config, async_add_entities, discovery_info=None - ): + hass: HomeAssistant, + config: ConfigType, + async_add_entities: AddEntitiesCallback, 
+ discovery_info: DiscoveryInfoType | None = None, + ) -> bool: order.append("sensor.template") return True diff --git a/tests/components/template/test_template_entity.py b/tests/components/template/test_template_entity.py index dcceea95181..c09a09750fe 100644 --- a/tests/components/template/test_template_entity.py +++ b/tests/components/template/test_template_entity.py @@ -11,14 +11,14 @@ async def test_template_entity_requires_hass_set(hass: HomeAssistant) -> None: """Test template entity requires hass to be set before accepting templates.""" entity = template_entity.TemplateEntity(hass) - with pytest.raises(AssertionError): + with pytest.raises(ValueError, match="^hass cannot be None"): entity.add_template_attribute("_hello", template.Template("Hello")) entity.hass = object() - entity.add_template_attribute("_hello", template.Template("Hello", None)) + with pytest.raises(ValueError, match="^template.hass cannot be None"): + entity.add_template_attribute("_hello", template.Template("Hello", None)) tpl_with_hass = template.Template("Hello", entity.hass) entity.add_template_attribute("_hello", tpl_with_hass) - # Because hass is set in `add_template_attribute`, both templates match `tpl_with_hass` - assert len(entity._template_attrs.get(tpl_with_hass, [])) == 2 + assert len(entity._template_attrs.get(tpl_with_hass, [])) == 1 diff --git a/tests/components/template/test_vacuum.py b/tests/components/template/test_vacuum.py index 8b1d082a62b..fd3e3e872ad 100644 --- a/tests/components/template/test_vacuum.py +++ b/tests/components/template/test_vacuum.py @@ -484,7 +484,9 @@ async def test_set_invalid_fan_speed( assert hass.states.get(_FAN_SPEED_INPUT_SELECT).state == "high" -def _verify(hass, expected_state, expected_battery_level): +def _verify( + hass: HomeAssistant, expected_state: str, expected_battery_level: int +) -> None: """Verify vacuum's state and speed.""" state = hass.states.get(_TEST_VACUUM) attributes = state.attributes @@ -492,7 +494,7 @@ def _verify(hass, expected_state, expected_battery_level): assert attributes.get(ATTR_BATTERY_LEVEL) == expected_battery_level -async def _register_basic_vacuum(hass): +async def _register_basic_vacuum(hass: HomeAssistant) -> None: """Register basic vacuum with only required options for testing.""" with assert_setup_component(1, "input_select"): assert await setup.async_setup_component( @@ -528,7 +530,7 @@ async def _register_basic_vacuum(hass): await hass.async_block_till_done() -async def _register_components(hass): +async def _register_components(hass: HomeAssistant) -> None: """Register basic components for testing.""" with assert_setup_component(2, "input_boolean"): assert await setup.async_setup_component( diff --git a/tests/components/tesla_fleet/conftest.py b/tests/components/tesla_fleet/conftest.py index 7d60ae5e174..615c62fe16e 100644 --- a/tests/components/tesla_fleet/conftest.py +++ b/tests/components/tesla_fleet/conftest.py @@ -106,7 +106,7 @@ def mock_wake_up() -> Generator[AsyncMock]: @pytest.fixture(autouse=True) def mock_live_status() -> Generator[AsyncMock]: - """Mock Teslemetry Energy Specific live_status method.""" + """Mock Tesla Fleet API Energy Specific live_status method.""" with patch( "homeassistant.components.tesla_fleet.EnergySpecific.live_status", side_effect=lambda: deepcopy(LIVE_STATUS), @@ -116,9 +116,18 @@ def mock_live_status() -> Generator[AsyncMock]: @pytest.fixture(autouse=True) def mock_site_info() -> Generator[AsyncMock]: - """Mock Teslemetry Energy Specific site_info method.""" + """Mock Tesla Fleet API 
Energy Specific site_info method.""" with patch( "homeassistant.components.tesla_fleet.EnergySpecific.site_info", side_effect=lambda: deepcopy(SITE_INFO), ) as mock_live_status: yield mock_live_status + + +@pytest.fixture +def mock_find_server() -> Generator[AsyncMock]: + """Mock Tesla Fleet find server method.""" + with patch( + "homeassistant.components.tesla_fleet.TeslaFleetApi.find_server", + ) as mock_find_server: + yield mock_find_server diff --git a/tests/components/tesla_fleet/snapshots/test_sensor.ambr b/tests/components/tesla_fleet/snapshots/test_sensor.ambr index e4c4c3d96c2..c6a4860056a 100644 --- a/tests/components/tesla_fleet/snapshots/test_sensor.ambr +++ b/tests/components/tesla_fleet/snapshots/test_sensor.ambr @@ -437,67 +437,6 @@ 'state': '6.245', }) # --- -# name: test_sensors[sensor.energy_site_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.energy_site_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tesla_fleet', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'island_status', - 'unique_id': '123456-island_status', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensors[sensor.energy_site_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site None', - }), - 'context': , - 'entity_id': 'sensor.energy_site_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- -# name: test_sensors[sensor.energy_site_none-statealt] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'enum', - 'friendly_name': 'Energy Site None', - }), - 'context': , - 'entity_id': 'sensor.energy_site_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on_grid', - }) -# --- # name: test_sensors[sensor.energy_site_percentage_charged-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tesla_fleet/test_binary_sensors.py b/tests/components/tesla_fleet/test_binary_sensors.py index ffbaac5e6d8..a759e5ced70 100644 --- a/tests/components/tesla_fleet/test_binary_sensors.py +++ b/tests/components/tesla_fleet/test_binary_sensors.py @@ -1,8 +1,10 @@ """Test the Tesla Fleet binary sensor platform.""" +from unittest.mock import AsyncMock + from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL @@ -34,7 +36,7 @@ async def test_binary_sensor_refresh( hass: HomeAssistant, snapshot: SnapshotAssertion, entity_registry: er.EntityRegistry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, normal_config_entry: MockConfigEntry, ) -> None: @@ -53,7 +55,7 @@ async def test_binary_sensor_refresh( async def test_binary_sensor_offline( hass: HomeAssistant, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, normal_config_entry: MockConfigEntry, ) -> None: """Tests that the binary sensor entities are correct when offline.""" diff --git 
a/tests/components/tesla_fleet/test_config_flow.py b/tests/components/tesla_fleet/test_config_flow.py index bd1c7d7c2b8..b49e090cd5d 100644 --- a/tests/components/tesla_fleet/test_config_flow.py +++ b/tests/components/tesla_fleet/test_config_flow.py @@ -5,6 +5,10 @@ from urllib.parse import parse_qs, urlparse import pytest +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) from homeassistant.components.tesla_fleet.const import ( AUTHORIZE_URL, CLIENT_ID, @@ -12,10 +16,11 @@ from homeassistant.components.tesla_fleet.const import ( SCOPES, TOKEN_URL, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow +from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry from tests.test_util.aiohttp import AiohttpClientMocker @@ -26,7 +31,7 @@ UNIQUE_ID = "uid" @pytest.fixture -async def access_token(hass: HomeAssistant) -> dict[str, str | list[str]]: +async def access_token(hass: HomeAssistant) -> str: """Return a valid access token.""" return config_entry_oauth2_flow._encode_jwt( hass, @@ -52,7 +57,7 @@ async def test_full_flow( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - access_token, + access_token: str, ) -> None: """Check full flow.""" result = await hass.config_entries.flow.async_init( @@ -111,12 +116,91 @@ async def test_full_flow( assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow_user_cred( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + access_token: str, +) -> None: + """Check full flow.""" + + # Create user application credential + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential("user_client_id", "user_client_secret"), + "user_cred", + ) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + + assert result["type"] is FlowResultType.FORM + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], {"implementation": "user_cred"} + ) + assert result["type"] is FlowResultType.EXTERNAL_STEP + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": REDIRECT, + }, + ) + + assert result["url"].startswith(AUTHORIZE_URL) + parsed_url = urlparse(result["url"]) + parsed_query = parse_qs(parsed_url.query) + assert parsed_query["response_type"][0] == "code" + assert parsed_query["client_id"][0] == "user_client_id" + assert parsed_query["redirect_uri"][0] == REDIRECT + assert parsed_query["state"][0] == state + assert parsed_query["scope"][0] == " ".join(SCOPES) + assert "code_challenge" not in parsed_query # Ensure not a PKCE flow + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + TOKEN_URL, + json={ + "refresh_token": "mock-refresh-token", + "access_token": access_token, + "type": "Bearer", + 
"expires_in": 60, + }, + ) + with patch( + "homeassistant.components.tesla_fleet.async_setup_entry", return_value=True + ) as mock_setup: + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup.mock_calls) == 1 + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == UNIQUE_ID + assert "result" in result + assert result["result"].unique_id == UNIQUE_ID + assert "token" in result["result"].data + assert result["result"].data["token"]["access_token"] == access_token + assert result["result"].data["token"]["refresh_token"] == "mock-refresh-token" + + @pytest.mark.usefixtures("current_request_with_host") async def test_reauthentication( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - access_token, + access_token: str, ) -> None: """Test Tesla Fleet reauthentication.""" old_entry = MockConfigEntry( @@ -127,15 +211,7 @@ async def test_reauthentication( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 @@ -177,21 +253,13 @@ async def test_reauth_account_mismatch( hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator, aioclient_mock: AiohttpClientMocker, - access_token, + access_token: str, ) -> None: """Test Tesla Fleet reauthentication with different account.""" old_entry = MockConfigEntry(domain=DOMAIN, unique_id="baduid", version=1, data={}) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() result = await hass.config_entries.flow.async_configure(flows[0]["flow_id"], {}) diff --git a/tests/components/tesla_fleet/test_device_tracker.py b/tests/components/tesla_fleet/test_device_tracker.py index 66a0c06de7f..e6f483d7953 100644 --- a/tests/components/tesla_fleet/test_device_tracker.py +++ b/tests/components/tesla_fleet/test_device_tracker.py @@ -1,6 +1,8 @@ """Test the Tesla Fleet device tracker platform.""" -from syrupy import SnapshotAssertion +from unittest.mock import AsyncMock + +from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import VehicleOffline from homeassistant.const import STATE_UNKNOWN, Platform @@ -26,7 +28,7 @@ async def test_device_tracker( async def test_device_tracker_offline( hass: HomeAssistant, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, normal_config_entry: MockConfigEntry, ) -> None: """Tests that the device tracker entities are correct when offline.""" diff --git a/tests/components/tesla_fleet/test_init.py b/tests/components/tesla_fleet/test_init.py index 20bb6c66906..9dcac4ec388 100644 --- a/tests/components/tesla_fleet/test_init.py +++ b/tests/components/tesla_fleet/test_init.py @@ -1,12 +1,16 @@ """Test the Tesla Fleet init.""" -from unittest.mock import AsyncMock +from unittest.mock import AsyncMock, patch +from aiohttp import RequestInfo +from aiohttp.client_exceptions import ClientResponseError from freezegun.api import FrozenDateTimeFactory 
import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from tesla_fleet_api.exceptions import ( + InvalidRegion, InvalidToken, + LibraryError, LoginRequired, OAuthExpired, RateLimited, @@ -14,6 +18,7 @@ from tesla_fleet_api.exceptions import ( VehicleOffline, ) +from homeassistant.components.tesla_fleet.const import AUTHORIZE_URL from homeassistant.components.tesla_fleet.coordinator import ( ENERGY_INTERVAL, ENERGY_INTERVAL_SECONDS, @@ -59,9 +64,9 @@ async def test_load_unload( async def test_init_error( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_products, - side_effect, - state, + mock_products: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test init with errors.""" @@ -70,6 +75,50 @@ async def test_init_error( assert normal_config_entry.state is state +async def test_oauth_refresh_expired( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, +) -> None: + """Test init with expired Oauth token.""" + + # Patch the token refresh to raise an error + with patch( + "homeassistant.components.tesla_fleet.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + RequestInfo(AUTHORIZE_URL, "POST", {}, AUTHORIZE_URL), None, status=401 + ), + ) as mock_async_ensure_token_valid: + # Trigger an unmocked function call + mock_products.side_effect = InvalidRegion + await setup_platform(hass, normal_config_entry) + + mock_async_ensure_token_valid.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR + + +async def test_oauth_refresh_error( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, +) -> None: + """Test init with Oauth refresh failure.""" + + # Patch the token refresh to raise an error + with patch( + "homeassistant.components.tesla_fleet.OAuth2Session.async_ensure_token_valid", + side_effect=ClientResponseError( + RequestInfo(AUTHORIZE_URL, "POST", {}, AUTHORIZE_URL), None, status=400 + ), + ) as mock_async_ensure_token_valid: + # Trigger an unmocked function call + mock_products.side_effect = InvalidRegion + await setup_platform(hass, normal_config_entry) + + mock_async_ensure_token_valid.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_RETRY + + # Test devices async def test_devices( hass: HomeAssistant, @@ -91,8 +140,8 @@ async def test_devices( async def test_vehicle_refresh_offline( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_vehicle_state, - mock_vehicle_data, + mock_vehicle_state: AsyncMock, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator refresh with an error.""" @@ -148,7 +197,7 @@ async def test_vehicle_refresh_error( async def test_vehicle_refresh_ratelimited( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator refresh handles 429.""" @@ -179,7 +228,7 @@ async def test_vehicle_refresh_ratelimited( async def test_vehicle_sleep( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator refresh with an error.""" @@ -241,9 +290,9 @@ async def test_vehicle_sleep( async def test_energy_live_refresh_error( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_live_status, - side_effect, - state, + 
mock_live_status: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test coordinator refresh with an error.""" mock_live_status.side_effect = side_effect @@ -256,9 +305,9 @@ async def test_energy_live_refresh_error( async def test_energy_site_refresh_error( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_site_info, - side_effect, - state, + mock_site_info: AsyncMock, + side_effect: TeslaFleetError, + state: ConfigEntryState, ) -> None: """Test coordinator refresh with an error.""" mock_site_info.side_effect = side_effect @@ -300,7 +349,7 @@ async def test_energy_live_refresh_ratelimited( async def test_energy_info_refresh_ratelimited( hass: HomeAssistant, normal_config_entry: MockConfigEntry, - mock_site_info, + mock_site_info: AsyncMock, freezer: FrozenDateTimeFactory, ) -> None: """Test coordinator refresh handles 429.""" @@ -326,3 +375,32 @@ async def test_energy_info_refresh_ratelimited( await hass.async_block_till_done() assert mock_site_info.call_count == 3 + + +async def test_init_region_issue( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, + mock_find_server: AsyncMock, +) -> None: + """Test init with region issue.""" + + mock_products.side_effect = InvalidRegion + await setup_platform(hass, normal_config_entry) + mock_find_server.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_init_region_issue_failed( + hass: HomeAssistant, + normal_config_entry: MockConfigEntry, + mock_products: AsyncMock, + mock_find_server: AsyncMock, +) -> None: + """Test init with unresolvable region issue.""" + + mock_products.side_effect = InvalidRegion + mock_find_server.side_effect = LibraryError + await setup_platform(hass, normal_config_entry) + mock_find_server.assert_called_once() + assert normal_config_entry.state is ConfigEntryState.SETUP_ERROR diff --git a/tests/components/tesla_fleet/test_sensor.py b/tests/components/tesla_fleet/test_sensor.py index 2133194e2a0..377179ca26a 100644 --- a/tests/components/tesla_fleet/test_sensor.py +++ b/tests/components/tesla_fleet/test_sensor.py @@ -1,8 +1,10 @@ """Test the Tesla Fleet sensor platform.""" +from unittest.mock import AsyncMock + from freezegun.api import FrozenDateTimeFactory import pytest -from syrupy import SnapshotAssertion +from syrupy.assertion import SnapshotAssertion from homeassistant.components.tesla_fleet.coordinator import VEHICLE_INTERVAL from homeassistant.const import Platform @@ -22,7 +24,7 @@ async def test_sensors( normal_config_entry: MockConfigEntry, entity_registry: er.EntityRegistry, freezer: FrozenDateTimeFactory, - mock_vehicle_data, + mock_vehicle_data: AsyncMock, ) -> None: """Tests that the sensor entities are correct.""" diff --git a/tests/components/teslemetry/test_config_flow.py b/tests/components/teslemetry/test_config_flow.py index fa35142dc07..03e46c6a8be 100644 --- a/tests/components/teslemetry/test_config_flow.py +++ b/tests/components/teslemetry/test_config_flow.py @@ -94,14 +94,7 @@ async def test_reauth(hass: HomeAssistant, mock_metadata) -> None: ) mock_entry.add_to_hass(hass) - result1 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=BAD_CONFIG, - ) + result1 = await mock_entry.start_reauth_flow(hass) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "reauth_confirm" @@ -144,15 +137,7 @@ async def test_reauth_errors( ) 
mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=BAD_CONFIG, - ) + result = await mock_entry.start_reauth_flow(hass) mock_metadata.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/tessie/snapshots/test_cover.ambr b/tests/components/tessie/snapshots/test_cover.ambr index 8c8c9a48c11..6338758afb7 100644 --- a/tests/components/tessie/snapshots/test_cover.ambr +++ b/tests/components/tessie/snapshots/test_cover.ambr @@ -95,39 +95,6 @@ 'state': 'closed', }) # --- -# name: test_covers[cover.test_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'cover', - 'entity_category': None, - 'entity_id': 'cover.test_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': None, - 'platform': 'tessie', - 'previous_unique_id': None, - 'supported_features': , - 'translation_key': 'vehicle_state_sun_roof_state', - 'unique_id': 'VINVINVIN-vehicle_state_sun_roof_state', - 'unit_of_measurement': None, - }) -# --- # name: test_covers[cover.test_sunroof-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tessie/test_config_flow.py b/tests/components/tessie/test_config_flow.py index 043086971fa..d51d467002d 100644 --- a/tests/components/tessie/test_config_flow.py +++ b/tests/components/tessie/test_config_flow.py @@ -143,14 +143,7 @@ async def test_reauth( ) mock_entry.add_to_hass(hass) - result1 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=TEST_CONFIG, - ) + result1 = await mock_entry.start_reauth_flow(hass) assert result1["type"] is FlowResultType.FORM assert result1["step_id"] == "reauth_confirm" @@ -194,15 +187,7 @@ async def test_reauth_errors( ) mock_entry.add_to_hass(hass) - result1 = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=TEST_CONFIG, - ) + result1 = await mock_entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result1["flow_id"], diff --git a/tests/components/thethingsnetwork/test_config_flow.py b/tests/components/thethingsnetwork/test_config_flow.py index 107d84e099b..99c4a080e17 100644 --- a/tests/components/thethingsnetwork/test_config_flow.py +++ b/tests/components/thethingsnetwork/test_config_flow.py @@ -4,7 +4,7 @@ import pytest from ttn_client import TTNAuthError from homeassistant.components.thethingsnetwork.const import CONF_APP_ID, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_API_KEY, CONF_HOST from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -12,6 +12,8 @@ from homeassistant.data_entry_flow import FlowResultType from . 
import init_integration from .conftest import API_KEY, APP_ID, HOST +from tests.common import MockConfigEntry + USER_DATA = {CONF_HOST: HOST, CONF_APP_ID: APP_ID, CONF_API_KEY: API_KEY} @@ -92,21 +94,13 @@ async def test_duplicate_entry( async def test_step_reauth( - hass: HomeAssistant, mock_ttnclient, mock_config_entry + hass: HomeAssistant, mock_ttnclient, mock_config_entry: MockConfigEntry ) -> None: """Test that the reauth step works.""" await init_integration(hass, mock_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": APP_ID, - "entry_id": mock_config_entry.entry_id, - }, - data=USER_DATA, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert not result["errors"] diff --git a/tests/components/tibber/test_config_flow.py b/tests/components/tibber/test_config_flow.py index 28b590a29d2..0c12c4a247b 100644 --- a/tests/components/tibber/test_config_flow.py +++ b/tests/components/tibber/test_config_flow.py @@ -5,7 +5,11 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from aiohttp import ClientError import pytest -from tibber import FatalHttpException, InvalidLogin, RetryableHttpException +from tibber import ( + FatalHttpExceptionError, + InvalidLoginError, + RetryableHttpExceptionError, +) from homeassistant import config_entries from homeassistant.components.recorder import Recorder @@ -66,9 +70,9 @@ async def test_create_entry(recorder_mock: Recorder, hass: HomeAssistant) -> Non [ (TimeoutError, ERR_TIMEOUT), (ClientError, ERR_CLIENT), - (InvalidLogin(401), ERR_TOKEN), - (RetryableHttpException(503), ERR_CLIENT), - (FatalHttpException(404), ERR_CLIENT), + (InvalidLoginError(401), ERR_TOKEN), + (RetryableHttpExceptionError(503), ERR_CLIENT), + (FatalHttpExceptionError(404), ERR_CLIENT), ], ) async def test_create_entry_exceptions( diff --git a/tests/components/tile/conftest.py b/tests/components/tile/conftest.py index e3b55c49ae7..01a711d9261 100644 --- a/tests/components/tile/conftest.py +++ b/tests/components/tile/conftest.py @@ -1,6 +1,8 @@ """Define test fixtures for Tile.""" +from collections.abc import Generator import json +from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest @@ -8,6 +10,7 @@ from pytile.tile import Tile from homeassistant.components.tile.const import DOMAIN from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture @@ -16,7 +19,7 @@ TEST_USERNAME = "user@host.com" @pytest.fixture(name="api") -def api_fixture(hass, data_tile_details): +def api_fixture(data_tile_details: dict[str, Any]) -> Mock: """Define a pytile API object.""" tile = Tile(None, data_tile_details) tile.async_update = AsyncMock() @@ -29,7 +32,9 @@ def api_fixture(hass, data_tile_details): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config): +def config_entry_fixture( + hass: HomeAssistant, config: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry(domain=DOMAIN, unique_id=config[CONF_USERNAME], data=config) entry.add_to_hass(hass) @@ -37,7 +42,7 @@ def config_entry_fixture(hass, config): @pytest.fixture(name="config") -def config_fixture(): +def config_fixture() -> dict[str, Any]: """Define a config entry data fixture.""" return { CONF_USERNAME: TEST_USERNAME, @@ -52,7 +57,7 @@ def 
data_tile_details_fixture(): @pytest.fixture(name="mock_pytile") -async def mock_pytile_fixture(api): +def mock_pytile_fixture(api: Mock) -> Generator[None]: """Define a fixture to patch pytile.""" with ( patch( @@ -64,7 +69,9 @@ async def mock_pytile_fixture(api): @pytest.fixture(name="setup_config_entry") -async def setup_config_entry_fixture(hass, config_entry, mock_pytile): +async def setup_config_entry_fixture( + hass: HomeAssistant, config_entry: MockConfigEntry, mock_pytile: None +) -> None: """Define a fixture to set up tile.""" assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/tile/test_config_flow.py b/tests/components/tile/test_config_flow.py index 87fe976ca3f..849be41d560 100644 --- a/tests/components/tile/test_config_flow.py +++ b/tests/components/tile/test_config_flow.py @@ -6,13 +6,15 @@ import pytest from pytile.errors import InvalidAuthError, TileError from homeassistant.components.tile import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER from homeassistant.const import CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from .conftest import TEST_PASSWORD, TEST_USERNAME +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("mock_login_response", "errors"), @@ -77,12 +79,10 @@ async def test_import_entry(hass: HomeAssistant, config, mock_pytile) -> None: async def test_step_reauth( - hass: HomeAssistant, config, config_entry, setup_config_entry + hass: HomeAssistant, config, config_entry: MockConfigEntry, setup_config_entry ) -> None: """Test that the reauth step works.""" - result = await hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH}, data=config - ) + result = await config_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/tod/test_binary_sensor.py b/tests/components/tod/test_binary_sensor.py index c4b28b527cb..b4b6b13d8e3 100644 --- a/tests/components/tod/test_binary_sensor.py +++ b/tests/components/tod/test_binary_sensor.py @@ -1,6 +1,6 @@ """Test Times of the Day Binary Sensor.""" -from datetime import datetime, timedelta +from datetime import datetime, timedelta, tzinfo from freezegun.api import FrozenDateTimeFactory import pytest @@ -16,13 +16,13 @@ from tests.common import assert_setup_component, async_fire_time_changed @pytest.fixture -def hass_time_zone(): +def hass_time_zone() -> str: """Return default hass timezone.""" return "US/Pacific" @pytest.fixture(autouse=True) -async def setup_fixture(hass, hass_time_zone): +async def setup_fixture(hass: HomeAssistant, hass_time_zone: str) -> None: """Set up things to be run when tests are started.""" hass.config.latitude = 50.27583 hass.config.longitude = 18.98583 @@ -30,7 +30,7 @@ async def setup_fixture(hass, hass_time_zone): @pytest.fixture -def hass_tz_info(hass): +def hass_tz_info(hass: HomeAssistant) -> tzinfo | None: """Return timezone info for the hass timezone.""" return dt_util.get_time_zone(hass.config.time_zone) diff --git a/tests/components/todoist/conftest.py b/tests/components/todoist/conftest.py index 45fda53ccc1..4b2bfea2e30 100644 --- a/tests/components/todoist/conftest.py +++ b/tests/components/todoist/conftest.py @@ -7,7 +7,7 @@ from unittest.mock 
import AsyncMock, patch import pytest from requests.exceptions import HTTPError from requests.models import Response -from todoist_api_python.models import Collaborator, Due, Label, Project, Task +from todoist_api_python.models import Collaborator, Due, Label, Project, Section, Task from homeassistant.components.todoist import DOMAIN from homeassistant.const import CONF_TOKEN, Platform @@ -18,6 +18,7 @@ from homeassistant.util import dt as dt_util from tests.common import MockConfigEntry PROJECT_ID = "project-id-1" +SECTION_ID = "section-id-1" SUMMARY = "A task" TOKEN = "some-token" TODAY = dt_util.now().strftime("%Y-%m-%d") @@ -98,6 +99,14 @@ def mock_api(tasks: list[Task]) -> AsyncMock: view_style="list", ) ] + api.get_sections.return_value = [ + Section( + id=SECTION_ID, + project_id=PROJECT_ID, + name="Section Name", + order=1, + ) + ] api.get_labels.return_value = [ Label(id="1", name="Label1", color="1", order=1, is_favorite=False) ] diff --git a/tests/components/todoist/test_calendar.py b/tests/components/todoist/test_calendar.py index d8123af3231..071a14a70ae 100644 --- a/tests/components/todoist/test_calendar.py +++ b/tests/components/todoist/test_calendar.py @@ -18,15 +18,17 @@ from homeassistant.components.todoist.const import ( DOMAIN, LABELS, PROJECT_NAME, + SECTION_NAME, SERVICE_NEW_TASK, ) from homeassistant.const import CONF_TOKEN, Platform from homeassistant.core import HomeAssistant +from homeassistant.exceptions import ServiceValidationError from homeassistant.helpers import entity_registry as er from homeassistant.helpers.entity_component import async_update_entity from homeassistant.util import dt as dt_util -from .conftest import PROJECT_ID, SUMMARY +from .conftest import PROJECT_ID, SECTION_ID, SUMMARY from tests.typing import ClientSessionGenerator @@ -269,6 +271,51 @@ async def test_create_task_service_call(hass: HomeAssistant, api: AsyncMock) -> ) +async def test_create_task_service_call_raises( + hass: HomeAssistant, api: AsyncMock +) -> None: + """Test adding an item to an invalid project raises an error.""" + + with pytest.raises(ServiceValidationError, match="project_invalid"): + await hass.services.async_call( + DOMAIN, + SERVICE_NEW_TASK, + { + ASSIGNEE: "user", + CONTENT: "task", + LABELS: ["Label1"], + PROJECT_NAME: "Missing Project", + }, + blocking=True, + ) + + +async def test_create_task_service_call_with_section( + hass: HomeAssistant, api: AsyncMock +) -> None: + """Test api is called correctly when section is included.""" + await hass.services.async_call( + DOMAIN, + SERVICE_NEW_TASK, + { + ASSIGNEE: "user", + CONTENT: "task", + LABELS: ["Label1"], + PROJECT_NAME: "Name", + SECTION_NAME: "Section Name", + }, + ) + await hass.async_block_till_done() + + api.add_task.assert_called_with( + "task", + project_id=PROJECT_ID, + section_id=SECTION_ID, + labels=["Label1"], + assignee_id="1", + ) + + @pytest.mark.parametrize( ("due"), [ diff --git a/tests/components/tomato/test_device_tracker.py b/tests/components/tomato/test_device_tracker.py index 099a2c2b40a..9484d3393d7 100644 --- a/tests/components/tomato/test_device_tracker.py +++ b/tests/components/tomato/test_device_tracker.py @@ -25,7 +25,7 @@ def mock_session_response(*args, **kwargs): """Mock data generation for session response.""" class MockSessionResponse: - def __init__(self, text, status_code): + def __init__(self, text, status_code) -> None: self.text = text self.status_code = status_code diff --git a/tests/components/tomorrowio/snapshots/test_weather.ambr 
b/tests/components/tomorrowio/snapshots/test_weather.ambr index fe65925e4c7..6278b50b7f7 100644 --- a/tests/components/tomorrowio/snapshots/test_weather.ambr +++ b/tests/components/tomorrowio/snapshots/test_weather.ambr @@ -735,1126 +735,6 @@ }), ]) # --- -# name: test_v4_forecast_service - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T11:00:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.9, - 'templow': 26.1, - 'wind_bearing': 239.6, - 'wind_speed': 34.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 49.4, - 'templow': 26.3, - 'wind_bearing': 262.82, - 'wind_speed': 26.06, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-09T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 67.0, - 'templow': 31.5, - 'wind_bearing': 229.3, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 65.3, - 'templow': 37.3, - 'wind_bearing': 149.91, - 'wind_speed': 38.3, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-11T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 66.2, - 'templow': 48.3, - 'wind_bearing': 210.45, - 'wind_speed': 56.48, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-12T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 67.9, - 'templow': 53.8, - 'wind_bearing': 217.98, - 'wind_speed': 44.28, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-13T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 54.5, - 'templow': 42.9, - 'wind_bearing': 58.79, - 'wind_speed': 34.99, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-14T10:00:00+00:00', - 'precipitation': 0.94, - 'precipitation_probability': 95, - 'temperature': 42.9, - 'templow': 33.4, - 'wind_bearing': 70.25, - 'wind_speed': 58.5, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-15T10:00:00+00:00', - 'precipitation': 0.06, - 'precipitation_probability': 55, - 'temperature': 43.7, - 'templow': 29.4, - 'wind_bearing': 84.47, - 'wind_speed': 57.2, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-16T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 43.0, - 'templow': 29.1, - 'wind_bearing': 103.85, - 'wind_speed': 24.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-17T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 52.4, - 'templow': 34.3, - 'wind_bearing': 145.41, - 'wind_speed': 26.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-18T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 54.1, - 'templow': 41.3, - 'wind_bearing': 62.99, - 'wind_speed': 23.69, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-19T10:00:00+00:00', - 'precipitation': 0.12, - 'precipitation_probability': 55, - 'temperature': 48.9, - 'templow': 39.4, - 'wind_bearing': 68.54, - 'wind_speed': 50.08, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-20T10:00:00+00:00', - 'precipitation': 0.05, - 'precipitation_probability': 33, - 'temperature': 40.1, - 'templow': 35.1, - 
'wind_bearing': 56.98, - 'wind_speed': 62.46, - }), - ]), - }) -# --- -# name: test_v4_forecast_service.1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T17:48:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.1, - 'wind_bearing': 315.14, - 'wind_speed': 33.59, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T18:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.8, - 'wind_bearing': 321.71, - 'wind_speed': 31.82, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T19:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.8, - 'wind_bearing': 323.38, - 'wind_speed': 32.04, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T20:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.3, - 'wind_bearing': 318.43, - 'wind_speed': 33.73, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T21:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.6, - 'wind_bearing': 320.9, - 'wind_speed': 28.98, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T22:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 41.9, - 'wind_bearing': 322.11, - 'wind_speed': 15.7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T23:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 38.9, - 'wind_bearing': 295.94, - 'wind_speed': 17.78, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T00:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 36.2, - 'wind_bearing': 11.94, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T01:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 34.3, - 'wind_bearing': 13.68, - 'wind_speed': 20.05, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T02:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 32.9, - 'wind_bearing': 14.93, - 'wind_speed': 19.48, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T03:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.9, - 'wind_bearing': 26.07, - 'wind_speed': 16.6, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T04:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 51.27, - 'wind_speed': 9.32, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T05:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.4, - 'wind_bearing': 343.25, - 'wind_speed': 11.92, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T06:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.7, - 'wind_bearing': 341.46, - 'wind_speed': 15.37, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T07:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.4, - 'wind_bearing': 322.34, - 'wind_speed': 12.71, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T08:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.1, - 
'wind_bearing': 294.69, - 'wind_speed': 13.14, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T09:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 30.1, - 'wind_bearing': 325.32, - 'wind_speed': 11.52, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T10:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.0, - 'wind_bearing': 322.27, - 'wind_speed': 10.22, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T11:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.2, - 'wind_bearing': 310.14, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T12:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 324.8, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T13:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 33.2, - 'wind_bearing': 335.16, - 'wind_speed': 23.26, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T14:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 37.0, - 'wind_bearing': 324.49, - 'wind_speed': 21.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T15:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 40.0, - 'wind_bearing': 310.68, - 'wind_speed': 19.98, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T16:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 42.4, - 'wind_bearing': 304.18, - 'wind_speed': 19.66, - }), - ]), - }) -# --- -# name: test_v4_forecast_service[forecast] - dict({ - 'weather.tomorrow_io_daily': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T11:00:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.9, - 'templow': 26.1, - 'wind_bearing': 239.6, - 'wind_speed': 34.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 49.4, - 'templow': 26.3, - 'wind_bearing': 262.82, - 'wind_speed': 26.06, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-09T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 67.0, - 'templow': 31.5, - 'wind_bearing': 229.3, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 65.3, - 'templow': 37.3, - 'wind_bearing': 149.91, - 'wind_speed': 38.3, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-11T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 66.2, - 'templow': 48.3, - 'wind_bearing': 210.45, - 'wind_speed': 56.48, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-12T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 67.9, - 'templow': 53.8, - 'wind_bearing': 217.98, - 'wind_speed': 44.28, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-13T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 54.5, - 'templow': 42.9, - 'wind_bearing': 58.79, - 
'wind_speed': 34.99, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-14T10:00:00+00:00', - 'precipitation': 0.94, - 'precipitation_probability': 95, - 'temperature': 42.9, - 'templow': 33.4, - 'wind_bearing': 70.25, - 'wind_speed': 58.5, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-15T10:00:00+00:00', - 'precipitation': 0.06, - 'precipitation_probability': 55, - 'temperature': 43.7, - 'templow': 29.4, - 'wind_bearing': 84.47, - 'wind_speed': 57.2, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-16T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 43.0, - 'templow': 29.1, - 'wind_bearing': 103.85, - 'wind_speed': 24.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-17T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 52.4, - 'templow': 34.3, - 'wind_bearing': 145.41, - 'wind_speed': 26.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-18T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 54.1, - 'templow': 41.3, - 'wind_bearing': 62.99, - 'wind_speed': 23.69, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-19T10:00:00+00:00', - 'precipitation': 0.12, - 'precipitation_probability': 55, - 'temperature': 48.9, - 'templow': 39.4, - 'wind_bearing': 68.54, - 'wind_speed': 50.08, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-20T10:00:00+00:00', - 'precipitation': 0.05, - 'precipitation_probability': 33, - 'temperature': 40.1, - 'templow': 35.1, - 'wind_bearing': 56.98, - 'wind_speed': 62.46, - }), - ]), - }), - }) -# --- -# name: test_v4_forecast_service[forecast].1 - dict({ - 'weather.tomorrow_io_daily': dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T17:48:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.1, - 'wind_bearing': 315.14, - 'wind_speed': 33.59, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T18:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.8, - 'wind_bearing': 321.71, - 'wind_speed': 31.82, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T19:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.8, - 'wind_bearing': 323.38, - 'wind_speed': 32.04, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T20:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.3, - 'wind_bearing': 318.43, - 'wind_speed': 33.73, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T21:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.6, - 'wind_bearing': 320.9, - 'wind_speed': 28.98, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T22:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 41.9, - 'wind_bearing': 322.11, - 'wind_speed': 15.7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T23:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 38.9, - 'wind_bearing': 295.94, - 'wind_speed': 17.78, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T00:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 36.2, - 'wind_bearing': 11.94, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'sunny', - 
'datetime': '2021-03-08T01:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 34.3, - 'wind_bearing': 13.68, - 'wind_speed': 20.05, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T02:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 32.9, - 'wind_bearing': 14.93, - 'wind_speed': 19.48, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T03:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.9, - 'wind_bearing': 26.07, - 'wind_speed': 16.6, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T04:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 51.27, - 'wind_speed': 9.32, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T05:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.4, - 'wind_bearing': 343.25, - 'wind_speed': 11.92, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T06:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.7, - 'wind_bearing': 341.46, - 'wind_speed': 15.37, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T07:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.4, - 'wind_bearing': 322.34, - 'wind_speed': 12.71, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T08:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.1, - 'wind_bearing': 294.69, - 'wind_speed': 13.14, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T09:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 30.1, - 'wind_bearing': 325.32, - 'wind_speed': 11.52, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T10:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.0, - 'wind_bearing': 322.27, - 'wind_speed': 10.22, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T11:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.2, - 'wind_bearing': 310.14, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T12:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 324.8, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T13:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 33.2, - 'wind_bearing': 335.16, - 'wind_speed': 23.26, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T14:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 37.0, - 'wind_bearing': 324.49, - 'wind_speed': 21.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T15:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 40.0, - 'wind_bearing': 310.68, - 'wind_speed': 19.98, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T16:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 42.4, - 'wind_bearing': 304.18, - 'wind_speed': 19.66, - }), - ]), - }), - }) -# --- -# name: test_v4_forecast_service[get_forecast] - dict({ - 'forecast': list([ - dict({ - 
'condition': 'sunny', - 'datetime': '2021-03-07T11:00:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.9, - 'templow': 26.1, - 'wind_bearing': 239.6, - 'wind_speed': 34.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 49.4, - 'templow': 26.3, - 'wind_bearing': 262.82, - 'wind_speed': 26.06, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-09T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 67.0, - 'templow': 31.5, - 'wind_bearing': 229.3, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-10T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 65.3, - 'templow': 37.3, - 'wind_bearing': 149.91, - 'wind_speed': 38.3, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-11T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 66.2, - 'templow': 48.3, - 'wind_bearing': 210.45, - 'wind_speed': 56.48, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-12T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 67.9, - 'templow': 53.8, - 'wind_bearing': 217.98, - 'wind_speed': 44.28, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-13T11:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 25, - 'temperature': 54.5, - 'templow': 42.9, - 'wind_bearing': 58.79, - 'wind_speed': 34.99, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-14T10:00:00+00:00', - 'precipitation': 0.94, - 'precipitation_probability': 95, - 'temperature': 42.9, - 'templow': 33.4, - 'wind_bearing': 70.25, - 'wind_speed': 58.5, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-15T10:00:00+00:00', - 'precipitation': 0.06, - 'precipitation_probability': 55, - 'temperature': 43.7, - 'templow': 29.4, - 'wind_bearing': 84.47, - 'wind_speed': 57.2, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-16T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 43.0, - 'templow': 29.1, - 'wind_bearing': 103.85, - 'wind_speed': 24.16, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-17T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 52.4, - 'templow': 34.3, - 'wind_bearing': 145.41, - 'wind_speed': 26.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-18T10:00:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 10, - 'temperature': 54.1, - 'templow': 41.3, - 'wind_bearing': 62.99, - 'wind_speed': 23.69, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2021-03-19T10:00:00+00:00', - 'precipitation': 0.12, - 'precipitation_probability': 55, - 'temperature': 48.9, - 'templow': 39.4, - 'wind_bearing': 68.54, - 'wind_speed': 50.08, - }), - dict({ - 'condition': 'snowy', - 'datetime': '2021-03-20T10:00:00+00:00', - 'precipitation': 0.05, - 'precipitation_probability': 33, - 'temperature': 40.1, - 'templow': 35.1, - 'wind_bearing': 56.98, - 'wind_speed': 62.46, - }), - ]), - }) -# --- -# name: test_v4_forecast_service[get_forecast].1 - dict({ - 'forecast': list([ - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T17:48:00+00:00', - 'dew_point': 12.8, - 'humidity': 58, - 'precipitation': 0.0, - 'precipitation_probability': 0, - 
'temperature': 44.1, - 'wind_bearing': 315.14, - 'wind_speed': 33.59, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T18:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.8, - 'wind_bearing': 321.71, - 'wind_speed': 31.82, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T19:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.8, - 'wind_bearing': 323.38, - 'wind_speed': 32.04, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T20:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 45.3, - 'wind_bearing': 318.43, - 'wind_speed': 33.73, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T21:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 44.6, - 'wind_bearing': 320.9, - 'wind_speed': 28.98, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T22:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 41.9, - 'wind_bearing': 322.11, - 'wind_speed': 15.7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-07T23:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 38.9, - 'wind_bearing': 295.94, - 'wind_speed': 17.78, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T00:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 36.2, - 'wind_bearing': 11.94, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2021-03-08T01:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 34.3, - 'wind_bearing': 13.68, - 'wind_speed': 20.05, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T02:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 32.9, - 'wind_bearing': 14.93, - 'wind_speed': 19.48, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T03:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.9, - 'wind_bearing': 26.07, - 'wind_speed': 16.6, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T04:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 51.27, - 'wind_speed': 9.32, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T05:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.4, - 'wind_bearing': 343.25, - 'wind_speed': 11.92, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T06:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.7, - 'wind_bearing': 341.46, - 'wind_speed': 15.37, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T07:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.4, - 'wind_bearing': 322.34, - 'wind_speed': 12.71, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T08:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 26.1, - 'wind_bearing': 294.69, - 'wind_speed': 13.14, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T09:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 30.1, - 'wind_bearing': 325.32, - 'wind_speed': 11.52, - }), - dict({ - 'condition': 'clear-night', - 
'datetime': '2021-03-08T10:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 31.0, - 'wind_bearing': 322.27, - 'wind_speed': 10.22, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T11:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 27.2, - 'wind_bearing': 310.14, - 'wind_speed': 20.12, - }), - dict({ - 'condition': 'clear-night', - 'datetime': '2021-03-08T12:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 29.2, - 'wind_bearing': 324.8, - 'wind_speed': 25.38, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T13:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 33.2, - 'wind_bearing': 335.16, - 'wind_speed': 23.26, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T14:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 37.0, - 'wind_bearing': 324.49, - 'wind_speed': 21.17, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2021-03-08T15:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 40.0, - 'wind_bearing': 310.68, - 'wind_speed': 19.98, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2021-03-08T16:48:00+00:00', - 'precipitation': 0.0, - 'precipitation_probability': 0, - 'temperature': 42.4, - 'wind_bearing': 304.18, - 'wind_speed': 19.66, - }), - ]), - }) -# --- # name: test_v4_forecast_service[get_forecasts] dict({ 'weather.tomorrow_io_daily': dict({ diff --git a/tests/components/toon/test_config_flow.py b/tests/components/toon/test_config_flow.py index 588924b416f..492e2a220ad 100644 --- a/tests/components/toon/test_config_flow.py +++ b/tests/components/toon/test_config_flow.py @@ -20,7 +20,7 @@ from tests.test_util.aiohttp import AiohttpClientMocker from tests.typing import ClientSessionGenerator -async def setup_component(hass): +async def setup_component(hass: HomeAssistant) -> None: """Set up Toon component.""" await async_process_ha_core_config( hass, diff --git a/tests/components/totalconnect/common.py b/tests/components/totalconnect/common.py index 1ceb893112c..6e9bb28a9b6 100644 --- a/tests/components/totalconnect/common.py +++ b/tests/components/totalconnect/common.py @@ -5,7 +5,8 @@ from unittest.mock import patch from total_connect_client import ArmingState, ResultCode, ZoneStatus, ZoneType from homeassistant.components.totalconnect.const import CONF_USERCODES, DOMAIN -from homeassistant.const import CONF_PASSWORD, CONF_USERNAME +from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, Platform +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry @@ -394,7 +395,7 @@ TOTALCONNECT_REQUEST = ( ) -async def setup_platform(hass, platform): +async def setup_platform(hass: HomeAssistant, platform: Platform) -> MockConfigEntry: """Set up the TotalConnect platform.""" # first set up a config entry and add it to hass mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA) @@ -422,7 +423,7 @@ async def setup_platform(hass, platform): return mock_entry -async def init_integration(hass): +async def init_integration(hass: HomeAssistant) -> MockConfigEntry: """Set up the TotalConnect integration.""" # first set up a config entry and add it to hass mock_entry = MockConfigEntry(domain=DOMAIN, data=CONFIG_DATA) diff --git a/tests/components/totalconnect/test_config_flow.py 
b/tests/components/totalconnect/test_config_flow.py
index 98de748faea..a0be52afb3b 100644
--- a/tests/components/totalconnect/test_config_flow.py
+++ b/tests/components/totalconnect/test_config_flow.py
@@ -9,7 +9,7 @@ from homeassistant.components.totalconnect.const import (
     CONF_USERCODES,
     DOMAIN,
 )
-from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER
+from homeassistant.config_entries import SOURCE_USER
 from homeassistant.const import CONF_PASSWORD
 from homeassistant.core import HomeAssistant
 from homeassistant.data_entry_flow import FlowResultType
@@ -141,9 +141,7 @@ async def test_reauth(hass: HomeAssistant) -> None:
     )
     entry.add_to_hass(hass)
 
-    result = await hass.config_entries.flow.async_init(
-        DOMAIN, context={"source": SOURCE_REAUTH}, data=entry.data
-    )
+    result = await entry.start_reauth_flow(hass)
 
     assert result["type"] is FlowResultType.FORM
     assert result["step_id"] == "reauth_confirm"
diff --git a/tests/components/touchline_sl/__init__.py b/tests/components/touchline_sl/__init__.py
new file mode 100644
index 00000000000..c22e9d329db
--- /dev/null
+++ b/tests/components/touchline_sl/__init__.py
@@ -0,0 +1 @@
+"""Tests for the Roth Touchline SL integration."""
diff --git a/tests/components/touchline_sl/conftest.py b/tests/components/touchline_sl/conftest.py
new file mode 100644
index 00000000000..4edeb048f5b
--- /dev/null
+++ b/tests/components/touchline_sl/conftest.py
@@ -0,0 +1,61 @@
+"""Common fixtures for the Roth Touchline SL tests."""
+
+from collections.abc import Generator
+from typing import NamedTuple
+from unittest.mock import AsyncMock, patch
+
+import pytest
+
+from homeassistant.components.touchline_sl.const import DOMAIN
+from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
+
+from tests.common import MockConfigEntry
+
+
+class FakeModule(NamedTuple):
+    """Fake Module used for unit testing only."""
+
+    name: str
+    id: str
+
+
+@pytest.fixture
+def mock_setup_entry() -> Generator[AsyncMock]:
+    """Override async_setup_entry."""
+    with patch(
+        "homeassistant.components.touchline_sl.async_setup_entry", return_value=True
+    ) as mock_setup_entry:
+        yield mock_setup_entry
+
+
+@pytest.fixture
+def mock_touchlinesl_client() -> Generator[AsyncMock]:
+    """Mock a pytouchlinesl client."""
+    with (
+        patch(
+            "homeassistant.components.touchline_sl.TouchlineSL",
+            autospec=True,
+        ) as mock_client,
+        patch(
+            "homeassistant.components.touchline_sl.config_flow.TouchlineSL",
+            new=mock_client,
+        ),
+    ):
+        client = mock_client.return_value
+        client.user_id.return_value = 12345
+        client.modules.return_value = [FakeModule(name="Foobar", id="deadbeef")]
+        yield client
+
+
+@pytest.fixture
+def mock_config_entry() -> MockConfigEntry:
+    """Mock a config entry."""
+    return MockConfigEntry(
+        domain=DOMAIN,
+        title="TouchlineSL",
+        data={
+            CONF_USERNAME: "test-username",
+            CONF_PASSWORD: "test-password",
+        },
+        unique_id="12345",
+    )
diff --git a/tests/components/touchline_sl/test_config_flow.py b/tests/components/touchline_sl/test_config_flow.py
new file mode 100644
index 00000000000..992fa2bdb3e
--- /dev/null
+++ b/tests/components/touchline_sl/test_config_flow.py
@@ -0,0 +1,113 @@
+"""Test the Roth Touchline SL config flow."""
+
+from unittest.mock import AsyncMock
+
+import pytest
+from pytouchlinesl.client import RothAPIError
+
+from homeassistant.components.touchline_sl.const import DOMAIN
+from homeassistant.config_entries import SOURCE_USER
+from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
+from homeassistant.core import HomeAssistant
+from
homeassistant.data_entry_flow import FlowResultType + +from tests.common import MockConfigEntry + +RESULT_UNIQUE_ID = "12345" + +CONFIG_DATA = { + CONF_USERNAME: "test-username", + CONF_PASSWORD: "test-password", +} + + +async def test_config_flow_success( + hass: HomeAssistant, mock_setup_entry: AsyncMock, mock_touchlinesl_client: AsyncMock +) -> None: + """Test the happy path where the provided username/password result in a new entry.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == CONFIG_DATA + assert result["result"].unique_id == RESULT_UNIQUE_ID + assert len(mock_setup_entry.mock_calls) == 1 + + +@pytest.mark.parametrize( + ("exception", "error_base"), + [ + (RothAPIError(status=401), "invalid_auth"), + (RothAPIError(status=502), "cannot_connect"), + (Exception, "unknown"), + ], +) +async def test_config_flow_failure_api_exceptions( + hass: HomeAssistant, + exception: Exception, + error_base: str, + mock_setup_entry: AsyncMock, + mock_touchlinesl_client: AsyncMock, +) -> None: + """Test for invalid credentials or API connection errors, and that the form can recover.""" + mock_touchlinesl_client.user_id.side_effect = exception + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.FORM + assert result["errors"] == {"base": error_base} + + # "Fix" the problem, and try again. 
+ mock_touchlinesl_client.user_id.side_effect = None + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.CREATE_ENTRY + assert result["title"] == "test-username" + assert result["data"] == CONFIG_DATA + assert result["result"].unique_id == RESULT_UNIQUE_ID + assert len(mock_setup_entry.mock_calls) == 1 + + +async def test_config_flow_failure_adding_non_unique_account( + hass: HomeAssistant, + mock_setup_entry: AsyncMock, + mock_touchlinesl_client: AsyncMock, + mock_config_entry: MockConfigEntry, +) -> None: + """Test that the config flow fails when user tries to add duplicate accounts.""" + mock_config_entry.add_to_hass(hass) + + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert result["errors"] == {} + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], CONFIG_DATA + ) + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "already_configured" diff --git a/tests/components/tplink/__init__.py b/tests/components/tplink/__init__.py index c51a451c847..c63ca9139f1 100644 --- a/tests/components/tplink/__init__.py +++ b/tests/components/tplink/__init__.py @@ -39,7 +39,7 @@ from homeassistant.setup import async_setup_component from tests.common import MockConfigEntry, load_json_value_fixture -ColorTempRange = namedtuple("ColorTempRange", ["min", "max"]) +ColorTempRange = namedtuple("ColorTempRange", ["min", "max"]) # noqa: PYI024 MODULE = "homeassistant.components.tplink" MODULE_CONFIG_FLOW = "homeassistant.components.tplink.config_flow" diff --git a/tests/components/tplink/fixtures/features.json b/tests/components/tplink/fixtures/features.json index 7cfe979ea25..6d4afd98d15 100644 --- a/tests/components/tplink/fixtures/features.json +++ b/tests/components/tplink/fixtures/features.json @@ -150,6 +150,11 @@ "type": "Sensor", "category": "Debug" }, + "check_latest_firmware": { + "value": "", + "type": "Action", + "category": "Info" + }, "thermostat_mode": { "value": "off", "type": "Sensor", diff --git a/tests/components/tplink/test_config_flow.py b/tests/components/tplink/test_config_flow.py index ddd67f249e6..f90eb985d38 100644 --- a/tests/components/tplink/test_config_flow.py +++ b/tests/components/tplink/test_config_flow.py @@ -1289,6 +1289,33 @@ async def test_discovery_timeout_connect( assert mock_connect["connect"].call_count == 1 +async def test_discovery_timeout_connect_legacy_error( + hass: HomeAssistant, + mock_discovery: AsyncMock, + mock_connect: AsyncMock, + mock_init, +) -> None: + """Test discovery tries legacy connect on timeout.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": config_entries.SOURCE_USER} + ) + mock_discovery["discover_single"].side_effect = TimeoutError + mock_connect["connect"].side_effect = KasaException + await hass.async_block_till_done() + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "user" + assert not result["errors"] + assert mock_connect["connect"].call_count == 0 + + result2 = await hass.config_entries.flow.async_configure( + result["flow_id"], {CONF_HOST: IP_ADDRESS} + ) + await hass.async_block_till_done() + assert result2["type"] is FlowResultType.FORM + assert result2["errors"] == {"base": "cannot_connect"} + assert mock_connect["connect"].call_count == 1 + + async def test_reauth_update_other_flows( hass: 
HomeAssistant, mock_discovery: AsyncMock, diff --git a/tests/components/tplink_omada/snapshots/test_switch.ambr b/tests/components/tplink_omada/snapshots/test_switch.ambr index 282d2a4a6a5..a13d386e721 100644 --- a/tests/components/tplink_omada/snapshots/test_switch.ambr +++ b/tests/components/tplink_omada/snapshots/test_switch.ambr @@ -25,19 +25,6 @@ 'state': 'on', }) # --- -# name: test_gateway_disappear_disables_switches - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Router Port 4 Internet Connected', - 'icon': 'mdi:ethernet', - }), - 'context': , - 'entity_id': 'switch.test_router_port_4_internet_connected', - 'last_changed': , - 'last_updated': , - 'state': 'on', - }) -# --- # name: test_gateway_port_change_disables_switch_entities StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -110,144 +97,6 @@ 'unit_of_measurement': None, }) # --- -# name: test_poe_switches.10 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 6 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_6_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.11 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_6_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 6 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000006_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.12 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 7 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_7_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.13 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_7_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 7 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000007_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.14 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 8 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_8_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.15 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 
'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_8_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 8 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000008_poe', - 'unit_of_measurement': None, - }) -# --- # name: test_poe_switches.2 StateSnapshot({ 'attributes': ReadOnlyDict({ @@ -294,141 +143,3 @@ 'unit_of_measurement': None, }) # --- -# name: test_poe_switches.4 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 3 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_3_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.5 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_3_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 3 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000003_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.6 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 4 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_4_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.7 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_4_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 4 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000004_poe', - 'unit_of_measurement': None, - }) -# --- -# name: test_poe_switches.8 - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test PoE Switch Port 5 PoE', - }), - 'context': , - 'entity_id': 'switch.test_poe_switch_port_5_poe', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_poe_switches.9 - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_poe_switch_port_5_poe', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 
'original_device_class': None, - 'original_icon': None, - 'original_name': 'Port 5 PoE', - 'platform': 'tplink_omada', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'poe_control', - 'unique_id': '54-AF-97-00-00-01_000000000000000000000005_poe', - 'unit_of_measurement': None, - }) -# --- diff --git a/tests/components/tplink_omada/test_config_flow.py b/tests/components/tplink_omada/test_config_flow.py index 08606fe126c..28ef0da170f 100644 --- a/tests/components/tplink_omada/test_config_flow.py +++ b/tests/components/tplink_omada/test_config_flow.py @@ -251,14 +251,7 @@ async def test_async_step_reauth_success(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -298,14 +291,7 @@ async def test_async_step_reauth_invalid_auth(hass: HomeAssistant) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_entry.entry_id, - }, - data=mock_entry.data, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/tplink_omada/test_switch.py b/tests/components/tplink_omada/test_switch.py index 7d83140cc95..abce87714a9 100644 --- a/tests/components/tplink_omada/test_switch.py +++ b/tests/components/tplink_omada/test_switch.py @@ -19,7 +19,7 @@ from tplink_omada_client.exceptions import InvalidDevice from homeassistant.components import switch from homeassistant.components.tplink_omada.coordinator import POLL_GATEWAY from homeassistant.const import ATTR_ENTITY_ID -from homeassistant.core import HomeAssistant +from homeassistant.core import HomeAssistant, ServiceResponse from homeassistant.helpers import entity_registry as er from homeassistant.util.dt import utcnow @@ -336,7 +336,7 @@ def _get_updated_gateway_port_status( return OmadaGatewayPortStatus(gateway_data["portStats"][port]) -def call_service(hass: HomeAssistant, service: str, entity_id: str): +def call_service(hass: HomeAssistant, service: str, entity_id: str) -> ServiceResponse: """Call any service on entity.""" return hass.services.async_call( switch.DOMAIN, service, {ATTR_ENTITY_ID: entity_id}, blocking=True diff --git a/tests/components/traccar/test_init.py b/tests/components/traccar/test_init.py index b25ab6a0a34..49127aec347 100644 --- a/tests/components/traccar/test_init.py +++ b/tests/components/traccar/test_init.py @@ -63,7 +63,7 @@ async def setup_zones(hass: HomeAssistant) -> None: @pytest.fixture(name="webhook_id") -async def webhook_id_fixture(hass, client): +async def webhook_id_fixture(hass: HomeAssistant, client: TestClient) -> str: """Initialize the Traccar component and get the webhook_id.""" await async_process_ha_core_config( hass, diff --git a/tests/components/trace/test_websocket_api.py b/tests/components/trace/test_websocket_api.py index b0b982d4825..7b292ed39e3 100644 --- a/tests/components/trace/test_websocket_api.py +++ b/tests/components/trace/test_websocket_api.py @@ -39,8 +39,12 @@ def _find_traces(traces, trace_type, item_id): async def _setup_automation_or_script( - hass, domain, configs, script_config=None, 
stored_traces=None -): + hass: HomeAssistant, + domain: str, + configs: list[dict[str, Any]], + script_config: dict[str, Any] | None = None, + stored_traces: int | None = None, +) -> None: """Set up automations or scripts from automation config.""" if domain == "script": configs = {config["id"]: {"sequence": config["action"]} for config in configs} @@ -66,7 +70,13 @@ async def _setup_automation_or_script( assert await async_setup_component(hass, domain, {domain: configs}) -async def _run_automation_or_script(hass, domain, config, event, context=None): +async def _run_automation_or_script( + hass: HomeAssistant, + domain: str, + config: dict[str, Any], + event: str, + context: dict[str, Any] | None = None, +) -> None: if domain == "automation": hass.bus.async_fire(event, context=context) else: diff --git a/tests/components/tractive/conftest.py b/tests/components/tractive/conftest.py index 7f319a87b5b..88c68a4b62f 100644 --- a/tests/components/tractive/conftest.py +++ b/tests/components/tractive/conftest.py @@ -10,6 +10,7 @@ import pytest from homeassistant.components.tractive.const import DOMAIN, SERVER_UNAVAILABLE from homeassistant.const import CONF_EMAIL, CONF_PASSWORD +from homeassistant.core import HomeAssistant from homeassistant.helpers.dispatcher import async_dispatcher_send from tests.common import MockConfigEntry, load_json_object_fixture @@ -76,7 +77,7 @@ def mock_tractive_client() -> Generator[AsyncMock]: } entry.runtime_data.client._send_switch_update(event) - def send_server_unavailable_event(hass): + def send_server_unavailable_event(hass: HomeAssistant) -> None: """Send server unavailable event.""" async_dispatcher_send(hass, f"{SERVER_UNAVAILABLE}-12345") diff --git a/tests/components/tractive/snapshots/test_binary_sensor.ambr b/tests/components/tractive/snapshots/test_binary_sensor.ambr index c6d50fb0fbb..4b610e927d5 100644 --- a/tests/components/tractive/snapshots/test_binary_sensor.ambr +++ b/tests/components/tractive/snapshots/test_binary_sensor.ambr @@ -46,50 +46,3 @@ 'state': 'on', }) # --- -# name: test_sensor[binary_sensor.test_pet_tracker_battery_charging-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'binary_sensor', - 'entity_category': , - 'entity_id': 'binary_sensor.test_pet_tracker_battery_charging', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Tracker battery charging', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker_battery_charging', - 'unique_id': 'pet_id_123_battery_charging', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[binary_sensor.test_pet_tracker_battery_charging-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'battery_charging', - 'friendly_name': 'Test Pet Tracker battery charging', - }), - 'context': , - 'entity_id': 'binary_sensor.test_pet_tracker_battery_charging', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- diff --git a/tests/components/tractive/snapshots/test_device_tracker.ambr b/tests/components/tractive/snapshots/test_device_tracker.ambr index 3a145a48b5a..4e7c5bfe173 100644 --- a/tests/components/tractive/snapshots/test_device_tracker.ambr +++ 
b/tests/components/tractive/snapshots/test_device_tracker.ambr @@ -50,54 +50,3 @@ 'state': 'not_home', }) # --- -# name: test_sensor[device_tracker.test_pet_tracker-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'device_tracker', - 'entity_category': , - 'entity_id': 'device_tracker.test_pet_tracker', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tracker', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker', - 'unique_id': 'pet_id_123', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[device_tracker.test_pet_tracker-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'battery_level': 88, - 'friendly_name': 'Test Pet Tracker', - 'gps_accuracy': 99, - 'latitude': 22.333, - 'longitude': 44.555, - 'source_type': , - }), - 'context': , - 'entity_id': 'device_tracker.test_pet_tracker', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'not_home', - }) -# --- diff --git a/tests/components/tractive/snapshots/test_switch.ambr b/tests/components/tractive/snapshots/test_switch.ambr index ea9ea9d9e48..08e0c984d0c 100644 --- a/tests/components/tractive/snapshots/test_switch.ambr +++ b/tests/components/tractive/snapshots/test_switch.ambr @@ -1,142 +1,4 @@ # serializer version: 1 -# name: test_sensor[switch.test_pet_live_tracking-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_pet_live_tracking', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Live tracking', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'live_tracking', - 'unique_id': 'pet_id_123_live_tracking', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.test_pet_live_tracking-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pet Live tracking', - }), - 'context': , - 'entity_id': 'switch.test_pet_live_tracking', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.test_pet_tracker_buzzer-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_pet_tracker_buzzer', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tracker buzzer', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker_buzzer', - 'unique_id': 'pet_id_123_buzzer', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.test_pet_tracker_buzzer-state] - 
StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pet Tracker buzzer', - }), - 'context': , - 'entity_id': 'switch.test_pet_tracker_buzzer', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'on', - }) -# --- -# name: test_sensor[switch.test_pet_tracker_led-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'switch', - 'entity_category': , - 'entity_id': 'switch.test_pet_tracker_led', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': 'Tracker LED', - 'platform': 'tractive', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'tracker_led', - 'unique_id': 'pet_id_123_led', - 'unit_of_measurement': None, - }) -# --- -# name: test_sensor[switch.test_pet_tracker_led-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'friendly_name': 'Test Pet Tracker LED', - }), - 'context': , - 'entity_id': 'switch.test_pet_tracker_led', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'off', - }) -# --- # name: test_switch[switch.test_pet_live_tracking-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/tractive/test_config_flow.py b/tests/components/tractive/test_config_flow.py index 5cedb51e5af..691bf671afd 100644 --- a/tests/components/tractive/test_config_flow.py +++ b/tests/components/tractive/test_config_flow.py @@ -110,15 +110,7 @@ async def test_reauthentication(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -151,15 +143,7 @@ async def test_reauthentication_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -189,15 +173,7 @@ async def test_reauthentication_unknown_failure(hass: HomeAssistant) -> None: ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -227,15 +203,7 @@ async def test_reauthentication_failure_no_existing_entry(hass: HomeAssistant) - ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] == {} 
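The reauth-flow test updates in the totalconnect, tplink_omada, tractive, and trafikverket hunks all follow the same pattern: the hand-rolled hass.config_entries.flow.async_init(DOMAIN, context={"source": SOURCE_REAUTH, ...}, data=entry.data) call is replaced by the MockConfigEntry.start_reauth_flow(hass) helper. Below is a minimal sketch of the resulting test shape, assuming a placeholder "example" domain and credentials; only the helper call and the assertions mirror what the diffs actually use.

from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType

from tests.common import MockConfigEntry


async def test_reauth_flow(hass: HomeAssistant) -> None:
    """Sketch: drive a reauth flow through the MockConfigEntry helper."""
    entry = MockConfigEntry(
        domain="example",  # placeholder domain, not from the diffs
        data={"username": "user", "password": "pass"},
    )
    entry.add_to_hass(hass)

    # Replaces flow.async_init(DOMAIN, context={"source": SOURCE_REAUTH, ...}, data=entry.data)
    result = await entry.start_reauth_flow(hass)

    assert result["type"] is FlowResultType.FORM
    assert result["step_id"] == "reauth_confirm"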
diff --git a/tests/components/trafikverket_camera/test_config_flow.py b/tests/components/trafikverket_camera/test_config_flow.py index 2e9e34f4c35..dd75f5e6838 100644 --- a/tests/components/trafikverket_camera/test_config_flow.py +++ b/tests/components/trafikverket_camera/test_config_flow.py @@ -208,15 +208,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -280,15 +272,7 @@ async def test_reauth_flow_error( entry.add_to_hass(hass) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.trafikverket_camera.config_flow.TrafikverketCamera.async_get_cameras", diff --git a/tests/components/trafikverket_ferry/test_config_flow.py b/tests/components/trafikverket_ferry/test_config_flow.py index 1c170a917cc..916f9c9f2ec 100644 --- a/tests/components/trafikverket_ferry/test_config_flow.py +++ b/tests/components/trafikverket_ferry/test_config_flow.py @@ -128,15 +128,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -203,15 +195,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.trafikverket_ferry.config_flow.TrafikverketFerry.async_get_next_ferry_stop", diff --git a/tests/components/trafikverket_train/conftest.py b/tests/components/trafikverket_train/conftest.py index 4915635e316..14671d27252 100644 --- a/tests/components/trafikverket_train/conftest.py +++ b/tests/components/trafikverket_train/conftest.py @@ -38,7 +38,7 @@ async def load_integration_from_entry( return_value=get_train_stop, ), patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), ): await hass.config_entries.async_setup(config_entry_id) diff --git a/tests/components/trafikverket_train/test_config_flow.py b/tests/components/trafikverket_train/test_config_flow.py index 400f396d355..3090a9fe337 100644 --- a/tests/components/trafikverket_train/test_config_flow.py +++ b/tests/components/trafikverket_train/test_config_flow.py @@ -246,15 +246,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - 
"source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -328,15 +320,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with ( patch( @@ -418,15 +402,7 @@ async def test_reauth_flow_error_departures( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with ( patch( @@ -499,7 +475,7 @@ async def test_options_flow( with ( patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", diff --git a/tests/components/trafikverket_train/test_init.py b/tests/components/trafikverket_train/test_init.py index 06598297dd1..c8fea174e83 100644 --- a/tests/components/trafikverket_train/test_init.py +++ b/tests/components/trafikverket_train/test_init.py @@ -34,7 +34,7 @@ async def test_unload_entry( with ( patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", @@ -69,7 +69,7 @@ async def test_auth_failed( entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", side_effect=InvalidAuthentication, ): await hass.config_entries.async_setup(entry.entry_id) @@ -99,7 +99,7 @@ async def test_no_stations( entry.add_to_hass(hass) with patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", side_effect=NoTrainStationFound, ): await hass.config_entries.async_setup(entry.entry_id) @@ -135,7 +135,7 @@ async def test_migrate_entity_unique_id( with ( patch( - "homeassistant.components.trafikverket_train.TrafikverketTrain.async_get_train_station", + "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_train_station", ), patch( "homeassistant.components.trafikverket_train.coordinator.TrafikverketTrain.async_get_next_train_stops", diff --git a/tests/components/trafikverket_weatherstation/test_config_flow.py b/tests/components/trafikverket_weatherstation/test_config_flow.py index 771336301ff..738d6a8ceac 100644 --- a/tests/components/trafikverket_weatherstation/test_config_flow.py +++ b/tests/components/trafikverket_weatherstation/test_config_flow.py @@ -116,14 +116,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) 
- result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -182,14 +175,7 @@ async def test_reauth_flow_fails( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} diff --git a/tests/components/transmission/test_config_flow.py b/tests/components/transmission/test_config_flow.py index e6c523bf1f6..b318862047e 100644 --- a/tests/components/transmission/test_config_flow.py +++ b/tests/components/transmission/test_config_flow.py @@ -160,14 +160,7 @@ async def test_reauth_success(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=transmission.DOMAIN, data=MOCK_CONFIG_DATA) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - transmission.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -197,14 +190,7 @@ async def test_reauth_failed(hass: HomeAssistant, mock_api: MagicMock) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - transmission.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -232,14 +218,7 @@ async def test_reauth_failed_connection_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - transmission.DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - data=MOCK_CONFIG_DATA, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/tts/common.py b/tests/components/tts/common.py index 1331f441940..b1eae12d694 100644 --- a/tests/components/tts/common.py +++ b/tests/components/tts/common.py @@ -130,6 +130,8 @@ class BaseProvider: def __init__(self, lang: str) -> None: """Initialize test provider.""" self._lang = lang + self._supported_languages = SUPPORT_LANGUAGES + self._supported_options = ["voice", "age"] @property def default_language(self) -> str: @@ -139,7 +141,7 @@ class BaseProvider: @property def supported_languages(self) -> list[str]: """Return list of supported languages.""" - return SUPPORT_LANGUAGES + return self._supported_languages @callback def async_get_supported_voices(self, language: str) -> list[Voice] | None: @@ -154,7 +156,7 @@ class BaseProvider: @property def supported_options(self) -> list[str]: """Return list of supported options like voice, emotions.""" - return ["voice", "age"] + return self._supported_options def get_tts_audio( self, message: str, language: str, options: dict[str, Any] @@ -163,7 +165,7 @@ class BaseProvider: return ("mp3", b"") -class MockProvider(BaseProvider, 
Provider): +class MockTTSProvider(BaseProvider, Provider): """Test speech API provider.""" def __init__(self, lang: str) -> None: @@ -175,10 +177,7 @@ class MockProvider(BaseProvider, Provider): class MockTTSEntity(BaseProvider, TextToSpeechEntity): """Test speech API provider.""" - @property - def name(self) -> str: - """Return the name of the entity.""" - return "Test" + _attr_name = "Test" class MockTTS(MockPlatform): @@ -188,7 +187,7 @@ class MockTTS(MockPlatform): {vol.Optional(CONF_LANG, default=DEFAULT_LANG): vol.In(SUPPORT_LANGUAGES)} ) - def __init__(self, provider: MockProvider, **kwargs: Any) -> None: + def __init__(self, provider: MockTTSProvider, **kwargs: Any) -> None: """Initialize.""" super().__init__(**kwargs) self._provider = provider @@ -205,7 +204,7 @@ class MockTTS(MockPlatform): async def mock_setup( hass: HomeAssistant, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Set up a test provider.""" mock_integration(hass, MockModule(domain=TEST_DOMAIN)) @@ -218,7 +217,9 @@ async def mock_setup( async def mock_config_entry_setup( - hass: HomeAssistant, tts_entity: MockTTSEntity + hass: HomeAssistant, + tts_entity: MockTTSEntity, + test_domain: str = TEST_DOMAIN, ) -> MockConfigEntry: """Set up a test tts platform via config entry.""" @@ -239,7 +240,7 @@ async def mock_config_entry_setup( mock_integration( hass, MockModule( - TEST_DOMAIN, + test_domain, async_setup_entry=async_setup_entry_init, async_unload_entry=async_unload_entry_init, ), @@ -254,9 +255,9 @@ async def mock_config_entry_setup( async_add_entities([tts_entity]) loaded_platform = MockPlatform(async_setup_entry=async_setup_entry_platform) - mock_platform(hass, f"{TEST_DOMAIN}.{TTS_DOMAIN}", loaded_platform) + mock_platform(hass, f"{test_domain}.{TTS_DOMAIN}", loaded_platform) - config_entry = MockConfigEntry(domain=TEST_DOMAIN) + config_entry = MockConfigEntry(domain=test_domain) config_entry.add_to_hass(hass) assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() diff --git a/tests/components/tts/conftest.py b/tests/components/tts/conftest.py index d9a4499f544..16c24f006d7 100644 --- a/tests/components/tts/conftest.py +++ b/tests/components/tts/conftest.py @@ -3,7 +3,8 @@ From http://doc.pytest.org/en/latest/example/simple.html#making-test-result-information-available-in-fixtures """ -from collections.abc import Generator +from collections.abc import Generator, Iterable +from contextlib import ExitStack from pathlib import Path from unittest.mock import MagicMock @@ -16,9 +17,9 @@ from homeassistant.core import HomeAssistant from .common import ( DEFAULT_LANG, TEST_DOMAIN, - MockProvider, MockTTS, MockTTSEntity, + MockTTSProvider, mock_config_entry_setup, mock_setup, ) @@ -66,9 +67,9 @@ async def mock_tts(hass: HomeAssistant, mock_provider) -> None: @pytest.fixture -def mock_provider() -> MockProvider: +def mock_provider() -> MockTTSProvider: """Test TTS provider.""" - return MockProvider(DEFAULT_LANG) + return MockTTSProvider(DEFAULT_LANG) @pytest.fixture @@ -81,12 +82,23 @@ class TTSFlow(ConfigFlow): """Test flow.""" -@pytest.fixture(autouse=True) -def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: - """Mock config flow.""" - mock_platform(hass, f"{TEST_DOMAIN}.config_flow") +@pytest.fixture(name="config_flow_test_domains") +def config_flow_test_domain_fixture() -> Iterable[str]: + """Test domain fixture.""" + return (TEST_DOMAIN,) - with mock_config_flow(TEST_DOMAIN, TTSFlow): + +@pytest.fixture(autouse=True) +def 
config_flow_fixture( + hass: HomeAssistant, config_flow_test_domains: Iterable[str] +) -> Generator[None]: + """Mock config flow.""" + for domain in config_flow_test_domains: + mock_platform(hass, f"{domain}.config_flow") + + with ExitStack() as stack: + for domain in config_flow_test_domains: + stack.enter_context(mock_config_flow(domain, TTSFlow)) yield @@ -94,7 +106,7 @@ def config_flow_fixture(hass: HomeAssistant) -> Generator[None]: async def setup_fixture( hass: HomeAssistant, request: pytest.FixtureRequest, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, mock_tts_entity: MockTTSEntity, ) -> None: """Set up the test environment.""" diff --git a/tests/components/tts/test_init.py b/tests/components/tts/test_init.py index bf44f120134..cf04fbb175b 100644 --- a/tests/components/tts/test_init.py +++ b/tests/components/tts/test_init.py @@ -30,15 +30,22 @@ from .common import ( DEFAULT_LANG, SUPPORT_LANGUAGES, TEST_DOMAIN, - MockProvider, + MockTTS, MockTTSEntity, + MockTTSProvider, get_media_source_url, mock_config_entry_setup, mock_setup, retrieve_media, ) -from tests.common import async_mock_service, mock_restore_cache +from tests.common import ( + MockModule, + async_mock_service, + mock_integration, + mock_platform, + mock_restore_cache, +) from tests.typing import ClientSessionGenerator, WebSocketGenerator ORIG_WRITE_TAGS = tts.SpeechManager.write_tags @@ -213,7 +220,7 @@ async def test_service( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockProvider("de_DE"), MockTTSEntity("de_DE"))], + [(MockTTSProvider("de_DE"), MockTTSEntity("de_DE"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -274,7 +281,7 @@ async def test_service_default_language( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockProvider("en_US"), MockTTSEntity("en_US"))], + [(MockTTSProvider("en_US"), MockTTSEntity("en_US"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -504,7 +511,7 @@ async def test_service_options( ).is_file() -class MockProviderWithDefaults(MockProvider): +class MockProviderWithDefaults(MockTTSProvider): """Mock provider with default options.""" @property @@ -847,7 +854,7 @@ async def test_service_receive_voice( @pytest.mark.parametrize( ("mock_provider", "mock_tts_entity"), - [(MockProvider("de_DE"), MockTTSEntity("de_DE"))], + [(MockTTSProvider("de_DE"), MockTTSEntity("de_DE"))], ) @pytest.mark.parametrize( ("setup", "tts_service", "service_data", "expected_url_suffix"), @@ -1008,7 +1015,7 @@ async def test_service_without_cache( ).is_file() -class MockProviderBoom(MockProvider): +class MockProviderBoom(MockTTSProvider): """Mock provider that blows up.""" def get_tts_audio( @@ -1016,7 +1023,7 @@ class MockProviderBoom(MockProvider): ) -> tts.TtsAudioType: """Load TTS dat.""" # This should not be called, data should be fetched from cache - raise Exception("Boom!") # pylint: disable=broad-exception-raised + raise Exception("Boom!") # noqa: TRY002 class MockEntityBoom(MockTTSEntity): @@ -1027,14 +1034,14 @@ class MockEntityBoom(MockTTSEntity): ) -> tts.TtsAudioType: """Load TTS dat.""" # This should not be called, data should be fetched from cache - raise Exception("Boom!") # pylint: disable=broad-exception-raised + raise Exception("Boom!") # noqa: TRY002 @pytest.mark.parametrize("mock_provider", [MockProviderBoom(DEFAULT_LANG)]) async def test_setup_legacy_cache_dir( hass: HomeAssistant, mock_tts_cache_dir: Path, - mock_provider: 
MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Set up a TTS platform with cache and call service without cache.""" calls = async_mock_service(hass, DOMAIN_MP, SERVICE_PLAY_MEDIA) @@ -1099,7 +1106,7 @@ async def test_setup_cache_dir( await hass.async_block_till_done() -class MockProviderEmpty(MockProvider): +class MockProviderEmpty(MockTTSProvider): """Mock provider with empty get_tts_audio.""" def get_tts_audio( @@ -1171,7 +1178,7 @@ async def test_service_get_tts_error( async def test_load_cache_legacy_retrieve_without_mem_cache( hass: HomeAssistant, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, mock_tts_cache_dir: Path, hass_client: ClientSessionGenerator, ) -> None: @@ -1389,9 +1396,6 @@ def test_resolve_engine(hass: HomeAssistant, setup: str, engine_id: str) -> None ): assert tts.async_resolve_engine(hass, None) is None - with patch.dict(hass.data[tts.DATA_TTS_MANAGER].providers, {"cloud": object()}): - assert tts.async_resolve_engine(hass, None) == "cloud" - @pytest.mark.parametrize( ("setup", "engine_id"), @@ -1422,7 +1426,7 @@ async def test_legacy_fetching_in_async( """Test async fetching of data for a legacy provider.""" tts_audio: asyncio.Future[bytes] = asyncio.Future() - class ProviderWithAsyncFetching(MockProvider): + class ProviderWithAsyncFetching(MockTTSProvider): """Provider that supports audio output option.""" @property @@ -1561,15 +1565,19 @@ async def test_fetching_in_async( @pytest.mark.parametrize( - ("setup", "engine_id"), + ("setup", "engine_id", "extra_data"), [ - ("mock_setup", "test"), - ("mock_config_entry_setup", "tts.test"), + ("mock_setup", "test", {"name": "Test"}), + ("mock_config_entry_setup", "tts.test", {}), ], indirect=["setup"], ) async def test_ws_list_engines( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, setup: str, engine_id: str + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup: str, + engine_id: str, + extra_data: dict[str, str], ) -> None: """Test listing tts engines and supported languages.""" client = await hass_ws_client() @@ -1584,6 +1592,7 @@ async def test_ws_list_engines( "engine_id": engine_id, "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], } + | extra_data ] } @@ -1592,7 +1601,7 @@ async def test_ws_list_engines( msg = await client.receive_json() assert msg["success"] assert msg["result"] == { - "providers": [{"engine_id": engine_id, "supported_languages": []}] + "providers": [{"engine_id": engine_id, "supported_languages": []} | extra_data] } await client.send_json_auto_id({"type": "tts/engine/list", "language": "en"}) @@ -1602,6 +1611,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["en_US", "en_GB"]} + | extra_data ] } @@ -1612,6 +1622,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["en_GB", "en_US"]} + | extra_data ] } @@ -1622,6 +1633,7 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de_DE", "de_CH"]} + | extra_data ] } @@ -1634,20 +1646,74 @@ async def test_ws_list_engines( assert msg["result"] == { "providers": [ {"engine_id": engine_id, "supported_languages": ["de_CH", "de_DE"]} + | extra_data + ] + } + + +async def test_ws_list_engines_deprecated( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + mock_tts_entity: MockTTSEntity, +) -> None: + """Test listing tts engines. 
+ + This test asserts the deprecated flag is set on a legacy engine whose integration + also provides tts entities. + """ + + mock_provider = MockTTSProvider(DEFAULT_LANG) + mock_provider_2 = MockTTSProvider(DEFAULT_LANG) + mock_integration(hass, MockModule(domain="test")) + mock_platform(hass, "test.tts", MockTTS(mock_provider)) + mock_integration(hass, MockModule(domain="test_2")) + mock_platform(hass, "test_2.tts", MockTTS(mock_provider_2)) + await async_setup_component( + hass, "tts", {"tts": [{"platform": "test"}, {"platform": "test_2"}]} + ) + await mock_config_entry_setup(hass, mock_tts_entity) + + client = await hass_ws_client() + + await client.send_json_auto_id({"type": "tts/engine/list"}) + + msg = await client.receive_json() + assert msg["success"] + assert msg["result"] == { + "providers": [ + { + "engine_id": "tts.test", + "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], + }, + { + "deprecated": True, + "engine_id": "test", + "name": "Test", + "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], + }, + { + "engine_id": "test_2", + "name": "Test", + "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], + }, ] } @pytest.mark.parametrize( - ("setup", "engine_id"), + ("setup", "engine_id", "extra_data"), [ - ("mock_setup", "test"), - ("mock_config_entry_setup", "tts.test"), + ("mock_setup", "test", {"name": "Test"}), + ("mock_config_entry_setup", "tts.test", {}), ], indirect=["setup"], ) async def test_ws_get_engine( - hass: HomeAssistant, hass_ws_client: WebSocketGenerator, setup: str, engine_id: str + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + setup: str, + engine_id: str, + extra_data: dict[str, str], ) -> None: """Test getting an tts engine.""" client = await hass_ws_client() @@ -1661,6 +1727,7 @@ async def test_ws_get_engine( "engine_id": engine_id, "supported_languages": ["de_CH", "de_DE", "en_GB", "en_US"], } + | extra_data } @@ -1838,3 +1905,61 @@ async def test_ttsentity_subclass_properties( if record.exc_info is not None ] ) + + +async def test_default_engine_prefer_entity( + hass: HomeAssistant, + mock_tts_entity: MockTTSEntity, + mock_provider: MockTTSProvider, +) -> None: + """Test async_default_engine. + + In this tests there's an entity and a legacy provider. + The test asserts async_default_engine returns the entity. + """ + mock_tts_entity._attr_name = "New test" + + await mock_setup(hass, mock_provider) + await mock_config_entry_setup(hass, mock_tts_entity) + await hass.async_block_till_done() + + entity_engine = tts.async_resolve_engine(hass, "tts.new_test") + assert entity_engine == "tts.new_test" + provider_engine = tts.async_resolve_engine(hass, "test") + assert provider_engine == "test" + assert tts.async_default_engine(hass) == "tts.new_test" + + +@pytest.mark.parametrize( + "config_flow_test_domains", + [ + # Test different setup order to ensure the default is not influenced + # by setup order. + ("cloud", "new_test"), + ("new_test", "cloud"), + ], +) +async def test_default_engine_prefer_cloud_entity( + hass: HomeAssistant, + mock_provider: MockTTSProvider, + config_flow_test_domains: str, +) -> None: + """Test async_default_engine. + + In this tests there's an entity from domain cloud, an entity from domain new_test + and a legacy provider. + The test asserts async_default_engine returns the entity from domain cloud. 
+ """ + await mock_setup(hass, mock_provider) + for domain in config_flow_test_domains: + entity = MockTTSEntity(DEFAULT_LANG) + entity._attr_name = f"{domain} TTS entity" + await mock_config_entry_setup(hass, entity, test_domain=domain) + await hass.async_block_till_done() + + for domain in config_flow_test_domains: + entity_engine = tts.async_resolve_engine(hass, f"tts.{domain}_tts_entity") + assert entity_engine == f"tts.{domain}_tts_entity" + provider_engine = tts.async_resolve_engine(hass, "test") + assert provider_engine == "test" + assert tts.async_default_engine(hass) == "tts.cloud_tts_entity" diff --git a/tests/components/tts/test_legacy.py b/tests/components/tts/test_legacy.py index 05bb6dec10f..22e8ac35f16 100644 --- a/tests/components/tts/test_legacy.py +++ b/tests/components/tts/test_legacy.py @@ -17,7 +17,7 @@ from homeassistant.helpers.discovery import async_load_platform from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from homeassistant.setup import async_setup_component -from .common import SUPPORT_LANGUAGES, MockProvider, MockTTS +from .common import SUPPORT_LANGUAGES, MockTTS, MockTTSProvider from tests.common import ( MockModule, @@ -75,7 +75,9 @@ async def test_invalid_platform( async def test_platform_setup_without_provider( - hass: HomeAssistant, caplog: pytest.LogCaptureFixture, mock_provider: MockProvider + hass: HomeAssistant, + caplog: pytest.LogCaptureFixture, + mock_provider: MockTTSProvider, ) -> None: """Test platform setup without provider returned.""" @@ -109,7 +111,7 @@ async def test_platform_setup_without_provider( async def test_platform_setup_with_error( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, - mock_provider: MockProvider, + mock_provider: MockTTSProvider, ) -> None: """Test platform setup with an error during setup.""" @@ -123,7 +125,7 @@ async def test_platform_setup_with_error( discovery_info: DiscoveryInfoType | None = None, ) -> Provider: """Raise exception during platform setup.""" - raise Exception("Setup error") # pylint: disable=broad-exception-raised + raise Exception("Setup error") # noqa: TRY002 mock_integration(hass, MockModule(domain="bad_tts")) mock_platform(hass, "bad_tts.tts", BadPlatform(mock_provider)) diff --git a/tests/components/tts/test_media_source.py b/tests/components/tts/test_media_source.py index 4c10d8f0b08..ba856fd9622 100644 --- a/tests/components/tts/test_media_source.py +++ b/tests/components/tts/test_media_source.py @@ -12,8 +12,8 @@ from homeassistant.setup import async_setup_component from .common import ( DEFAULT_LANG, - MockProvider, MockTTSEntity, + MockTTSProvider, mock_config_entry_setup, mock_setup, retrieve_media, @@ -28,7 +28,7 @@ class MSEntity(MockTTSEntity): get_tts_audio = MagicMock(return_value=("mp3", b"")) -class MSProvider(MockProvider): +class MSProvider(MockTTSProvider): """Test speech API provider.""" get_tts_audio = MagicMock(return_value=("mp3", b"")) diff --git a/tests/components/tuya/test_config_flow.py b/tests/components/tuya/test_config_flow.py index 6e971262bc8..247aec02cd1 100644 --- a/tests/components/tuya/test_config_flow.py +++ b/tests/components/tuya/test_config_flow.py @@ -8,7 +8,7 @@ import pytest from syrupy.assertion import SnapshotAssertion from homeassistant.components.tuya.const import CONF_APP_TYPE, CONF_USER_CODE, DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import 
FlowResultType @@ -145,15 +145,7 @@ async def test_reauth_flow( """Test the reauthentication configuration flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "scan" @@ -185,15 +177,7 @@ async def test_reauth_flow_migration( assert CONF_APP_TYPE in mock_old_config_entry.data assert CONF_USER_CODE not in mock_old_config_entry.data - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_old_config_entry.unique_id, - "entry_id": mock_old_config_entry.entry_id, - }, - data=mock_old_config_entry.data, - ) + result = await mock_old_config_entry.start_reauth_flow(hass) assert result.get("type") is FlowResultType.FORM assert result.get("step_id") == "reauth_user_code" @@ -229,15 +213,7 @@ async def test_reauth_flow_failed_qr_code( """Test an error occurring while retrieving the QR code.""" mock_old_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "unique_id": mock_old_config_entry.unique_id, - "entry_id": mock_old_config_entry.entry_id, - }, - data=mock_old_config_entry.data, - ) + result = await mock_old_config_entry.start_reauth_flow(hass) # Something went wrong getting the QR code (like an invalid user code) mock_tuya_login_control.qr_code.return_value["success"] = False diff --git a/tests/components/twitch/test_config_flow.py b/tests/components/twitch/test_config_flow.py index 6935943a4d3..fc53b17551c 100644 --- a/tests/components/twitch/test_config_flow.py +++ b/tests/components/twitch/test_config_flow.py @@ -10,7 +10,7 @@ from homeassistant.components.twitch.const import ( DOMAIN, OAUTH2_AUTHORIZE, ) -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResult, FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -109,14 +109,7 @@ async def test_reauth( ) -> None: """Check reauth flow.""" await setup_integration(hass, config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -184,14 +177,7 @@ async def test_reauth_wrong_account( twitch_mock.return_value.get_users = lambda *args, **kwargs: get_generator( "get_users_2.json", TwitchUser ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": config_entry.entry_id, - }, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/unifi/snapshots/test_button.ambr b/tests/components/unifi/snapshots/test_button.ambr index 51a37620268..de305aee7eb 100644 --- a/tests/components/unifi/snapshots/test_button.ambr +++ b/tests/components/unifi/snapshots/test_button.ambr @@ -1,98 +1,4 @@ # 
serializer version: 1 -# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_port_1_power_cycle-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.switch_port_1_power_cycle', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Port 1 Power Cycle', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'power_cycle-00:00:00:00:01:01_1', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_port_1_power_cycle-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'switch Port 1 Power Cycle', - }), - 'context': , - 'entity_id': 'button.switch_port_1_power_cycle', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_restart-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'button', - 'entity_category': , - 'entity_id': 'button.switch_restart', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Restart', - 'platform': 'unifi', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': None, - 'unique_id': 'device_restart-00:00:00:00:01:01', - 'unit_of_measurement': None, - }) -# --- -# name: test_entity_and_device_data[site_payload0-device_payload0][button.switch_restart-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'device_class': 'restart', - 'friendly_name': 'switch Restart', - }), - 'context': , - 'entity_id': 'button.switch_restart', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': 'unknown', - }) -# --- # name: test_entity_and_device_data[site_payload0-wlan_payload0-device_payload0][button.ssid_1_regenerate_password-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/unifi/snapshots/test_image.ambr b/tests/components/unifi/snapshots/test_image.ambr index e33ec678217..0922320ed4d 100644 --- a/tests/components/unifi/snapshots/test_image.ambr +++ b/tests/components/unifi/snapshots/test_image.ambr @@ -47,12 +47,6 @@ 'state': '2021-01-01T01:01:00+00:00', }) # --- -# name: test_wlan_qr_code - b'\x89PNG\r\n\x1a\n\x00\x00\x00\rIHDR\x00\x00\x00\x84\x00\x00\x00\x84\x01\x00\x00\x00\x00y?\xbe\n\x00\x00\x00\xcaIDATx\xda\xedV[\n\xc30\x0c\x13\xbb\x80\xef\x7fK\xdd\xc0\x93\x94\xfd\xac\x1fcL\xfbl(\xc4\x04*\xacG\xdcb/\x8b\xb8O\xdeO\x00\xccP\x95\x8b\xe5\x03\xd7\xf5\xcd\x89pF\xcf\x8c \\48\x08\nS\x948\x03p\xfe\x80C\xa8\x9d\x16\xc7P\xabvJ}\xe2\xd7\x84[\xe5W\xfc7\xbbS\xfd\xde\xcfB\xf115\xa2\xe3%\x99\xad\x93\xa0:\xbf6\xbeS\xec\x1a^\xb4\xed\xfb\xb2\xab\xd1\x99\xc9\xcdAjx\x89\x0e\xc5\xea\xf4T\xf9\xee\xe40m58\xb6<\x1b\xab~\xf4\xban\xd7:\xceu\x9e\x05\xc4I\xa6\xbb\xfb%q<7:\xbf\xa2\x90wo\xf5 -# --- -# name: 
test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].3 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-config_entry_options0].4 - '1234.0' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].3 - 'Wired client RX' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_rx-rx--config_entry_options0].6 - '1234.0' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0] - 'uptime-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].2 - 'timestamp' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].3 - 'Wired client Uptime' -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].4 - None -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].5 - None -# --- -# name: test_sensor_sources[client_payload0-sensor.wired_client_uptime-uptime--config_entry_options0].6 - '2020-09-14T14:41:45+00:00' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].3 - 'Wired client RX' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_rx-rx--config_entry_options0].6 - '1234.0' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0] - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].1 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].2 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].3 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-config_entry_options0].4 - '5678.0' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].1 - -# --- -# name: 
test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].3 - 'Wired client TX' -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload1-sensor.wired_client_tx-tx--config_entry_options0].6 - '5678.0' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].3 - 'Wired client TX' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_tx-tx--config_entry_options0].6 - '5678.0' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0] - 'uptime-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].2 - 'timestamp' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].3 - 'Wired client Uptime' -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].4 - None -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].5 - None -# --- -# name: test_sensor_sources[client_payload2-sensor.wired_client_uptime-uptime--config_entry_options0].6 - '2020-09-14T14:41:45+00:00' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:02' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].3 - 'Wireless client RX' -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload2-sensor.wireless_client_rx-rx--config_entry_options0].6 - '2345.0' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0] - 'rx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].3 - 'Wireless 
client RX' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_rx-rx--config_entry_options0].6 - '2345.0' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:02' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].3 - 'Wireless client TX' -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload3-sensor.wireless_client_tx-tx--config_entry_options0].6 - '6789.0' -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0] - 'tx-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].2 - 'data_rate' -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].3 - 'Wireless client TX' -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].4 - -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].5 - -# --- -# name: test_sensor_sources[client_payload4-sensor.wireless_client_tx-tx--config_entry_options0].6 - '6789.0' -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0] - 'uptime-00:00:00:00:00:01' -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].1 - -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].2 - 'timestamp' -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].3 - 'Wireless client Uptime' -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].4 - None -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].5 - None -# --- -# name: test_sensor_sources[client_payload5-sensor.wireless_client_uptime-uptime--config_entry_options0].6 - '2021-01-01T01:00:00+00:00' +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_clients-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_clients', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 
'original_name': 'Clients', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_clients-20:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_clients-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Device Clients', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.device_clients', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_state', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_state-20:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Device State', + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.device_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Connected', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Temperature', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_temperature-20:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'Device Temperature', + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.device_temperature', + 'last_changed': , 
+ 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.device_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_uptime-20:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.device_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Device Uptime', + }), + 'context': , + 'entity_id': 'sensor.device_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T01:00:00+00:00', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_budget-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_budget', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC Power Budget', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ac_power_budget-01:02:03:04:05:ff', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_budget-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dummy USP-PDU-Pro AC Power Budget', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_budget', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1875.000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_consumption-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_consumption', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor': dict({ + 'suggested_display_precision': 1, + }), + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'AC 
Power Consumption', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'ac_power_conumption-01:02:03:04:05:ff', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_ac_power_consumption-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dummy USP-PDU-Pro AC Power Consumption', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_ac_power_consumption', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '201.683', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_clients-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_clients', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clients', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_clients-01:02:03:04:05:ff', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_clients-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dummy USP-PDU-Pro Clients', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_clients', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_cpu_utilization-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_cpu_utilization', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'CPU utilization', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cpu_utilization-01:02:03:04:05:ff', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_cpu_utilization-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dummy USP-PDU-Pro CPU utilization', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_cpu_utilization', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1.4', + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_memory_utilization-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_memory_utilization', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Memory utilization', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'memory_utilization-01:02:03:04:05:ff', + 'unit_of_measurement': '%', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_memory_utilization-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Dummy USP-PDU-Pro Memory utilization', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_memory_utilization', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '28.9', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_outlet_2_outlet_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_outlet_2_outlet_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2 Outlet Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet_power-01:02:03:04:05:ff_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_outlet_2_outlet_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2 Outlet Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_outlet_2_outlet_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '73.827', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_state', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_state-01:02:03:04:05:ff', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'Dummy USP-PDU-Pro State', + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Connected', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_uptime-01:02:03:04:05:ff', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.dummy_usp_pdu_pro_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Dummy USP-PDU-Pro Uptime', + }), + 'context': , + 'entity_id': 'sensor.dummy_usp_pdu_pro_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020-12-18T05:36:58+00:00', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_clients-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_clients', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Clients', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_clients-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_clients-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'mock-name Clients', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_clients', + 
'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan2_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_cloudflare_wan2_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cloudflare WAN2 latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cloudflare_wan2_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan2_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Cloudflare WAN2 latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_cloudflare_wan2_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_cloudflare_wan_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Cloudflare WAN latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'cloudflare_wan_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_cloudflare_wan_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Cloudflare WAN latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_cloudflare_wan_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan2_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_google_wan2_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , 
+ 'original_icon': None, + 'original_name': 'Google WAN2 latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'google_wan2_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan2_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Google WAN2 latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_google_wan2_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_google_wan_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Google WAN latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'google_wan_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_google_wan_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Google WAN latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_google_wan_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '53', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan2_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_microsoft_wan2_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Microsoft WAN2 latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'microsoft_wan2_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan2_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Microsoft WAN2 latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_microsoft_wan2_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) 
+# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan_latency-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_microsoft_wan_latency', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Microsoft WAN latency', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'microsoft_wan_latency-10:00:00:00:01:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_microsoft_wan_latency-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'duration', + 'friendly_name': 'mock-name Microsoft WAN latency', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_microsoft_wan_latency', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '56', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_poe_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_1_poe_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 1 PoE Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe_power-10:00:00:00:01:01_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_poe_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'mock-name Port 1 PoE Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_1_poe_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.56', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_1_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 1 RX', + 'platform': 'unifi', 
+ 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 1 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_1_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_1_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 1 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_1', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_1_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 1 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_1_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_poe_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_2_poe_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 2 PoE Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe_power-10:00:00:00:01:01_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_poe_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'mock-name Port 2 PoE Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_2_poe_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2.56', + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_2_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 2 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 2 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_2_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_2_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 2 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_2', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_2_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 2 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_2_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_3_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 
'original_icon': 'mdi:download', + 'original_name': 'Port 3 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 3 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_3_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_3_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 3 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_3', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_3_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 3 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_3_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_poe_power-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_4_poe_power', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 4 PoE Power', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe_power-10:00:00:00:01:01_4', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_poe_power-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'power', + 'friendly_name': 'mock-name Port 4 PoE Power', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_4_poe_power', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00', + }) +# --- +# 
name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_4_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'Port 4 RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_rx-10:00:00:00:01:01_4', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 4 RX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_4_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_port_4_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 'sensor.private': dict({ + 'suggested_unit_of_measurement': , + }), + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'Port 4 TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_tx-10:00:00:00:01:01_4', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_port_4_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'mock-name Port 4 TX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.mock_name_port_4_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.00000', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_state-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_state', + 'has_entity_name': True, + 
'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'State', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_state-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_state-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'enum', + 'friendly_name': 'mock-name State', + 'options': list([ + 'Disconnected', + 'Connected', + 'Pending', + 'Firmware Mismatch', + 'Upgrading', + 'Provisioning', + 'Heartbeat Missed', + 'Adopting', + 'Deleting', + 'Inform Error', + 'Adoption Failed', + 'Isolated', + 'Unknown', + ]), + }), + 'context': , + 'entity_id': 'sensor.mock_name_state', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Connected', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.mock_name_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_uptime-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.mock_name_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'mock-name Uptime', + }), + 'context': , + 'entity_id': 'sensor.mock_name_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan_clients-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'SSID 1', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wired_client_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'rx-00:00:00:00:00:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wired client RX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wired_client_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '1234.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wired_client_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'tx-00:00:00:00:00:01', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wired client TX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wired_client_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '5678.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wired_client_uptime', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uptime-00:00:00:00:00:01', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wired_client_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Wired client Uptime', + }), + 'context': , + 'entity_id': 'sensor.wired_client_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2020-09-14T14:41:45+00:00', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_rx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wireless_client_rx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload', + 'original_name': 'RX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'rx-00:00:00:00:00:02', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_rx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wireless client RX', + 'icon': 'mdi:upload', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wireless_client_rx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2345.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_tx-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wireless_client_tx', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:download', + 'original_name': 'TX', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'tx-00:00:00:00:00:02', + 'unit_of_measurement': , + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_tx-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'data_rate', + 'friendly_name': 'Wireless client TX', + 'icon': 'mdi:download', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.wireless_client_tx', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '6789.0', + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_uptime-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.wireless_client_uptime', + 
'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Uptime', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'uptime-00:00:00:00:00:02', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[wlan_payload0-device_payload0-client_payload0-config_entry_options0][sensor.wireless_client_uptime-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'timestamp', + 'friendly_name': 'Wireless client Uptime', + }), + 'context': , + 'entity_id': 'sensor.wireless_client_uptime', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '2021-01-01T01:00:00+00:00', + }) # --- diff --git a/tests/components/unifi/snapshots/test_switch.ambr b/tests/components/unifi/snapshots/test_switch.ambr new file mode 100644 index 00000000000..04b15f329fd --- /dev/null +++ b/tests/components/unifi/snapshots/test_switch.ambr @@ -0,0 +1,2473 @@ +# serializer version: 1 +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_port_1_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 1 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_1_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Port 1 Power Cycle', + }), + 'context': , + 'entity_id': 'button.mock_name_port_1_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_port_2_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 2 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_2_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Port 2 Power Cycle', + }), + 'context': , + 'entity_id': 'button.mock_name_port_2_power_cycle', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_port_4_power_cycle', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Port 4 Power Cycle', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'power_cycle-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_port_4_power_cycle-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Port 4 Power Cycle', + }), + 'context': , + 'entity_id': 'button.mock_name_port_4_power_cycle', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.mock_name_restart', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Restart', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'device_restart-10:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][button.mock_name_restart-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'restart', + 'friendly_name': 'mock-name Restart', + }), + 'context': , + 'entity_id': 'button.mock_name_restart', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', 
+ }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 
'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 
'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 
'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': 
set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'block-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'Block Client 1', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.block_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': 
, + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + 
}), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 
'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_plex', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 
'mdi:upload-network', + 'original_name': 'plex', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network plex', + 'icon': 'mdi:upload-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_plex', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 
'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 
'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_plex', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload-network', + 'original_name': 'plex', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0][switch.unifi_network_plex-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network plex', + 'icon': 'mdi:upload-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_plex', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_client_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'block-00:00:00:00:01:01', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_client_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 
'friendly_name': 'Block Client 1', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.block_client_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'off', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.block_media_streaming', + 'has_entity_name': False, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': 'mdi:network', + 'original_name': 'Block Media Streaming', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': '5f976f4ae3c58f018ec7dff6', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.block_media_streaming-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'Block Media Streaming', + 'icon': 'mdi:network', + }), + 'context': , + 'entity_id': 'switch.block_media_streaming', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 2', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_2', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_outlet_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro Outlet 2', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_outlet_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': 
None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'USB Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-01:02:03:04:05:ff_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.dummy_usp_pdu_pro_usb_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Dummy USP-PDU-Pro USB Outlet 1', + }), + 'context': , + 'entity_id': 'switch.dummy_usp_pdu_pro_usb_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 1 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_1_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 1 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_1_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 2 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_2', + 'unit_of_measurement': None, + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_2_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 2 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_2_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:ethernet', + 'original_name': 'Port 4 PoE', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'poe-10:00:00:00:01:01_4', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.mock_name_port_4_poe-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'mock-name Port 4 PoE', + 'icon': 'mdi:ethernet', + }), + 'context': , + 'entity_id': 'switch.mock_name_port_4_poe', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': None, + 'entity_id': 'switch.plug_outlet_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outlet 1', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'outlet-fc:ec:da:76:4f:5f_1', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.plug_outlet_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'outlet', + 'friendly_name': 'Plug Outlet 1', + }), + 'context': , + 'entity_id': 'switch.plug_outlet_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: 
test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.ssid_1', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:wifi-check', + 'original_name': None, + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'wlan-012345678910111213141516', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.ssid_1-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'SSID 1', + 'icon': 'mdi:wifi-check', + }), + 'context': , + 'entity_id': 'switch.ssid_1', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_plex', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': 'mdi:upload-network', + 'original_name': 'plex', + 'platform': 'unifi', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': None, + 'unique_id': 'port_forward-5a32aa4ee4b0412345678911', + 'unit_of_measurement': None, + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_plex-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'switch', + 'friendly_name': 'UniFi Network plex', + 'icon': 'mdi:upload-network', + }), + 'context': , + 'entity_id': 'switch.unifi_network_plex', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- +# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_test_traffic_rule-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'switch', + 'entity_category': , + 'entity_id': 'switch.unifi_network_test_traffic_rule', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': 
dict({
+    }),
+    'original_device_class': ,
+    'original_icon': 'mdi:security-network',
+    'original_name': 'Test Traffic Rule',
+    'platform': 'unifi',
+    'previous_unique_id': None,
+    'supported_features': 0,
+    'translation_key': None,
+    'unique_id': 'traffic_rule-6452cd9b859d5b11aa002ea1',
+    'unit_of_measurement': None,
+  })
+# ---
+# name: test_entity_and_device_data[site_payload0-wlan_payload0-traffic_rule_payload0-port_forward_payload0-dpi_group_payload0-dpi_app_payload0-device_payload0-client_payload0-config_entry_options0][switch.unifi_network_test_traffic_rule-state]
+  StateSnapshot({
+    'attributes': ReadOnlyDict({
+      'device_class': 'switch',
+      'friendly_name': 'UniFi Network Test Traffic Rule',
+      'icon': 'mdi:security-network',
+    }),
+    'context': ,
+    'entity_id': 'switch.unifi_network_test_traffic_rule',
+    'last_changed': ,
+    'last_reported': ,
+    'last_updated': ,
+    'state': 'on',
+  })
+# ---
diff --git a/tests/components/unifi/test_config_flow.py b/tests/components/unifi/test_config_flow.py
index 1d745511dc5..71b196550da 100644
--- a/tests/components/unifi/test_config_flow.py
+++ b/tests/components/unifi/test_config_flow.py
@@ -24,7 +24,6 @@ from homeassistant.components.unifi.const import (
     CONF_TRACK_WIRED_CLIENTS,
     DOMAIN as UNIFI_DOMAIN,
 )
-from homeassistant.config_entries import SOURCE_REAUTH
 from homeassistant.const import (
     CONF_HOST,
     CONF_PASSWORD,
@@ -302,15 +301,7 @@ async def test_reauth_flow_update_configuration(
     """Verify reauth flow can update hub configuration."""
     config_entry = config_entry_setup
 
-    result = await hass.config_entries.flow.async_init(
-        UNIFI_DOMAIN,
-        context={
-            "source": SOURCE_REAUTH,
-            "unique_id": config_entry.unique_id,
-            "entry_id": config_entry.entry_id,
-        },
-        data=config_entry.data,
-    )
+    result = await config_entry.start_reauth_flow(hass)
 
     assert result["type"] is FlowResultType.FORM
     assert result["step_id"] == "user"
@@ -344,15 +335,7 @@ async def test_reauth_flow_update_configuration_on_not_loaded_entry(
     with patch("aiounifi.Controller.login", side_effect=aiounifi.errors.RequestError):
         config_entry = await config_entry_factory()
 
-    result = await hass.config_entries.flow.async_init(
-        UNIFI_DOMAIN,
-        context={
-            "source": SOURCE_REAUTH,
-            "unique_id": config_entry.unique_id,
-            "entry_id": config_entry.entry_id,
-        },
-        data=config_entry.data,
-    )
+    result = await config_entry.start_reauth_flow(hass)
 
     assert result["type"] is FlowResultType.FORM
     assert result["step_id"] == "user"
diff --git a/tests/components/unifi/test_sensor.py b/tests/components/unifi/test_sensor.py
index 5af4b297847..3c94d12018d 100644
--- a/tests/components/unifi/test_sensor.py
+++ b/tests/components/unifi/test_sensor.py
@@ -10,14 +10,12 @@ from aiounifi.models.device import DeviceState
 from aiounifi.models.message import MessageKey
 from freezegun.api import FrozenDateTimeFactory, freeze_time
 import pytest
-from syrupy.assertion import SnapshotAssertion
+from syrupy import SnapshotAssertion
 
 from homeassistant.components.sensor import (
-    ATTR_STATE_CLASS,
     DOMAIN as SENSOR_DOMAIN,
     SCAN_INTERVAL,
     SensorDeviceClass,
-    SensorStateClass,
 )
 from homeassistant.components.unifi.const import (
     CONF_ALLOW_BANDWIDTH_SENSORS,
@@ -31,10 +29,9 @@ from homeassistant.components.unifi.const import (
 from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY
 from homeassistant.const import (
     ATTR_DEVICE_CLASS,
-    ATTR_FRIENDLY_NAME,
-    ATTR_UNIT_OF_MEASUREMENT,
     STATE_UNAVAILABLE,
     EntityCategory,
+    Platform,
 )
 from homeassistant.core import HomeAssistant
 from homeassistant.helpers import entity_registry as er
@@ -47,7 +44,26 @@ from .conftest import (
     WebsocketStateManager,
 )
 
-from tests.common import MockConfigEntry, async_fire_time_changed
+from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform
+
+WIRED_CLIENT = {
+    "hostname": "Wired client",
+    "is_wired": True,
+    "mac": "00:00:00:00:00:01",
+    "oui": "Producer",
+    "wired-rx_bytes-r": 1234000000,
+    "wired-tx_bytes-r": 5678000000,
+    "uptime": 1600094505,
+}
+WIRELESS_CLIENT = {
+    "is_wired": False,
+    "mac": "00:00:00:00:00:02",
+    "name": "Wireless client",
+    "oui": "Producer",
+    "rx_bytes-r": 2345000000.0,
+    "tx_bytes-r": 6789000000.0,
+    "uptime": 60,
+}
 
 DEVICE_1 = {
     "board_rev": 2,
@@ -321,6 +337,114 @@ PDU_OUTLETS_UPDATE_DATA = [
 ]
 
 
+@pytest.mark.parametrize(
+    "config_entry_options",
+    [
+        {
+            CONF_ALLOW_BANDWIDTH_SENSORS: True,
+            CONF_ALLOW_UPTIME_SENSORS: True,
+        }
+    ],
+)
+@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]])
+@pytest.mark.parametrize(
+    "device_payload",
+    [
+        [
+            DEVICE_1,
+            PDU_DEVICE_1,
+            {  # Temperature
+                "board_rev": 3,
+                "device_id": "mock-id",
+                "general_temperature": 30,
+                "has_fan": True,
+                "has_temperature": True,
+                "fan_level": 0,
+                "ip": "10.0.1.1",
+                "last_seen": 1562600145,
+                "mac": "20:00:00:00:01:01",
+                "model": "US16P150",
+                "name": "Device",
+                "next_interval": 20,
+                "overheating": True,
+                "state": 1,
+                "type": "usw",
+                "upgradable": True,
+                "uptime": 60,
+                "version": "4.0.42.10433",
+            },
+            {  # Latency monitors
+                "board_rev": 2,
+                "device_id": "mock-id",
+                "ip": "10.0.1.1",
+                "mac": "10:00:00:00:01:01",
+                "last_seen": 1562600145,
+                "model": "US16P150",
+                "name": "mock-name",
+                "port_overrides": [],
+                "uptime_stats": {
+                    "WAN": {
+                        "availability": 100.0,
+                        "latency_average": 39,
+                        "monitors": [
+                            {
+                                "availability": 100.0,
+                                "latency_average": 56,
+                                "target": "www.microsoft.com",
+                                "type": "icmp",
+                            },
+                            {
+                                "availability": 100.0,
+                                "latency_average": 53,
+                                "target": "google.com",
+                                "type": "icmp",
+                            },
+                            {
+                                "availability": 100.0,
+                                "latency_average": 30,
+                                "target": "1.1.1.1",
+                                "type": "icmp",
+                            },
+                        ],
+                    },
+                    "WAN2": {
+                        "monitors": [
+                            {
+                                "availability": 0.0,
+                                "target": "www.microsoft.com",
+                                "type": "icmp",
+                            },
+                            {
+                                "availability": 0.0,
+                                "target": "google.com",
+                                "type": "icmp",
+                            },
+                            {"availability": 0.0, "target": "1.1.1.1", "type": "icmp"},
+                        ],
+                    },
+                },
+                "state": 1,
+                "type": "usw",
+                "version": "4.0.42.10433",
+            },
+        ]
+    ],
+)
+@pytest.mark.parametrize("wlan_payload", [[WLAN]])
+@pytest.mark.usefixtures("entity_registry_enabled_by_default")
+@pytest.mark.freeze_time("2021-01-01 01:01:00")
+async def test_entity_and_device_data(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    config_entry_factory,
+    snapshot: SnapshotAssertion,
+) -> None:
+    """Validate entity and device data."""
+    with patch("homeassistant.components.unifi.PLATFORMS", [Platform.SENSOR]):
+        config_entry = await config_entry_factory()
+    await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id)
+
+
 @pytest.mark.parametrize(
     "config_entry_options",
     [{CONF_ALLOW_BANDWIDTH_SENSORS: True, CONF_ALLOW_UPTIME_SENSORS: True}],
@@ -342,29 +466,7 @@ async def test_no_clients(hass: HomeAssistant) -> None:
         }
     ],
 )
-@pytest.mark.parametrize(
-    "client_payload",
-    [
-        [
-            {
-                "hostname": "Wired client",
-                "is_wired": True,
-                "mac": "00:00:00:00:00:01",
-                "oui": "Producer",
-                "wired-rx_bytes-r": 1234000000,
-                "wired-tx_bytes-r": 5678000000,
-            },
-            {
-                "is_wired": False,
-                "mac": "00:00:00:00:00:02",
-                "name": "Wireless client",
-                "oui": "Producer",
-                "rx_bytes-r": 2345000000.0,
-                "tx_bytes-r": 6789000000.0,
-            },
-        ]
-    ],
-)
+@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]])
 async def test_bandwidth_sensors(
     hass: HomeAssistant,
     mock_websocket_message: WebsocketMessageMock,
@@ -373,33 +475,8 @@ async def test_bandwidth_sensors(
     client_payload: list[dict[str, Any]],
 ) -> None:
     """Verify that bandwidth sensors are working as expected."""
-    assert len(hass.states.async_all()) == 5
-    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 4
-
-    # Verify sensor attributes and state
-
-    wrx_sensor = hass.states.get("sensor.wired_client_rx")
-    assert wrx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE
-    assert wrx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
-    assert wrx_sensor.state == "1234.0"
-
-    wtx_sensor = hass.states.get("sensor.wired_client_tx")
-    assert wtx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE
-    assert wtx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
-    assert wtx_sensor.state == "5678.0"
-
-    wlrx_sensor = hass.states.get("sensor.wireless_client_rx")
-    assert wlrx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE
-    assert wlrx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
-    assert wlrx_sensor.state == "2345.0"
-
-    wltx_sensor = hass.states.get("sensor.wireless_client_tx")
-    assert wltx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE
-    assert wltx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
-    assert wltx_sensor.state == "6789.0"
-
     # Verify state update
-    wireless_client = client_payload[1]
+    wireless_client = deepcopy(client_payload[1])
     wireless_client["rx_bytes-r"] = 3456000000
     wireless_client["tx_bytes-r"] = 7891000000
@@ -468,31 +545,7 @@ async def test_bandwidth_sensors(
     "config_entry_options",
     [{CONF_ALLOW_BANDWIDTH_SENSORS: True, CONF_ALLOW_UPTIME_SENSORS: True}],
 )
-@pytest.mark.parametrize(
-    "client_payload",
-    [
-        [
-            {
-                "hostname": "Wired client",
-                "is_wired": True,
-                "mac": "00:00:00:00:00:01",
-                "oui": "Producer",
-                "wired-rx_bytes": 1234000000,
-                "wired-tx_bytes": 5678000000,
-                "uptime": 1600094505,
-            },
-            {
-                "is_wired": False,
-                "mac": "00:00:00:00:00:02",
-                "name": "Wireless client",
-                "oui": "Producer",
-                "rx_bytes": 2345000000,
-                "tx_bytes": 6789000000,
-                "uptime": 60,
-            },
-        ]
-    ],
-)
+@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT, WIRELESS_CLIENT]])
 @pytest.mark.usefixtures("config_entry_setup")
 @pytest.mark.usefixtures("entity_registry_enabled_by_default")
 async def test_remove_sensors(
@@ -535,7 +588,6 @@ async def test_poe_port_switches(
 
     ent_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_poe_power")
     assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION
-    assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC
 
     # Enable entity
     entity_registry.async_update_entity(
@@ -600,7 +652,6 @@ async def test_poe_port_switches(
 @pytest.mark.parametrize("wlan_payload", [[WLAN]])
 async def test_wlan_client_sensors(
     hass: HomeAssistant,
-    entity_registry: er.EntityRegistry,
     config_entry_factory: ConfigEntryFactoryType,
     mock_websocket_message: WebsocketMessageMock,
     mock_websocket_state: WebsocketStateManager,
@@ -633,14 +684,8 @@ async def test_wlan_client_sensors(
 
     assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 1
 
-    ent_reg_entry = entity_registry.async_get("sensor.ssid_1")
-    assert ent_reg_entry.unique_id == "wlan_clients-012345678910111213141516"
-    assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC
-
-    # Validate state object
-    ssid_1 = hass.states.get("sensor.ssid_1")
-    assert ssid_1 is not None
-    assert ssid_1.state == "1"
+    assert hass.states.get("sensor.ssid_1").state == "1"
 
     # Verify state update - increasing number
     wireless_client_1 = client_payload[0]
@@ -709,7 +754,6 @@ async def test_wlan_client_sensors(
 @pytest.mark.parametrize(
     (
         "entity_id",
-        "expected_unique_id",
         "expected_value",
         "changed_data",
         "expected_update_value",
@@ -717,21 +761,18 @@
     [
         (
            "dummy_usp_pdu_pro_outlet_2_outlet_power",
-            "outlet_power-01:02:03:04:05:ff_2",
            "73.827",
            {"outlet_table": PDU_OUTLETS_UPDATE_DATA},
            "123.45",
        ),
        (
            "dummy_usp_pdu_pro_ac_power_budget",
-            "ac_power_budget-01:02:03:04:05:ff",
            "1875.000",
            None,
            None,
        ),
        (
            "dummy_usp_pdu_pro_ac_power_consumption",
-            "ac_power_conumption-01:02:03:04:05:ff",
            "201.683",
            {"outlet_ac_power_consumption": "456.78"},
            "456.78",
@@ -742,26 +783,18 @@
 @pytest.mark.usefixtures("config_entry_setup")
 async def test_outlet_power_readings(
     hass: HomeAssistant,
-    entity_registry: er.EntityRegistry,
     mock_websocket_message: WebsocketMessageMock,
     device_payload: list[dict[str, Any]],
     entity_id: str,
-    expected_unique_id: str,
-    expected_value: Any,
-    changed_data: dict | None,
-    expected_update_value: Any,
+    expected_value: str,
+    changed_data: dict[str, Any] | None,
+    expected_update_value: str | None,
 ) -> None:
     """Test the outlet power reporting on PDU devices."""
     assert len(hass.states.async_all()) == 13
     assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 7
 
-    ent_reg_entry = entity_registry.async_get(f"sensor.{entity_id}")
-    assert ent_reg_entry.unique_id == expected_unique_id
-    assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC
-
-    sensor_data = hass.states.get(f"sensor.{entity_id}")
-    assert sensor_data.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.POWER
-    assert sensor_data.state == expected_value
+    assert hass.states.get(f"sensor.{entity_id}").state == expected_value
 
     if changed_data is not None:
         updated_device_data = deepcopy(device_payload[0])
@@ -770,8 +803,7 @@ async def test_outlet_power_readings(
         mock_websocket_message(message=MessageKey.DEVICE, data=updated_device_data)
         await hass.async_block_till_done()
 
-        sensor_data = hass.states.get(f"sensor.{entity_id}")
-        assert sensor_data.state == expected_update_value
+        assert hass.states.get(f"sensor.{entity_id}").state == expected_update_value
 
 
 @pytest.mark.parametrize(
@@ -804,17 +836,12 @@ async def test_outlet_power_readings(
 @pytest.mark.usefixtures("config_entry_setup")
 async def test_device_temperature(
     hass: HomeAssistant,
-    entity_registry: er.EntityRegistry,
     mock_websocket_message: WebsocketMessageMock,
     device_payload: list[dict[str, Any]],
 ) -> None:
     """Verify that temperature sensors are working as expected."""
     assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3
     assert hass.states.get("sensor.device_temperature").state == "30"
-    assert (
-        entity_registry.async_get("sensor.device_temperature").entity_category
-        is EntityCategory.DIAGNOSTIC
-    )
 
     # Verify new event change temperature
     device = device_payload[0]
@@ -859,10 +886,6 @@ async def test_device_state(
 ) -> None:
     """Verify that state sensors are working as expected."""
     assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3
-    assert (
-        entity_registry.async_get("sensor.device_state").entity_category
-        is EntityCategory.DIAGNOSTIC
-    )
 
     device = device_payload[0]
     for i in list(map(int, DeviceState)):
@@ -890,7 +913,6 @@ async def test_device_state(
 @pytest.mark.usefixtures("config_entry_setup")
 async def test_device_system_stats(
     hass: HomeAssistant,
-    entity_registry: er.EntityRegistry,
     mock_websocket_message: WebsocketMessageMock,
     device_payload: list[dict[str, Any]],
 ) -> None:
@@ -901,16 +923,6 @@ async def test_device_system_stats(
     assert hass.states.get("sensor.device_cpu_utilization").state == "5.8"
     assert hass.states.get("sensor.device_memory_utilization").state == "31.1"
 
-    assert (
-        entity_registry.async_get("sensor.device_cpu_utilization").entity_category
-        is EntityCategory.DIAGNOSTIC
-    )
-
-    assert (
-        entity_registry.async_get("sensor.device_memory_utilization").entity_category
-        is EntityCategory.DIAGNOSTIC
-    )
-
     # Verify new event change system-stats
     device = device_payload[0]
     device["system-stats"] = {"cpu": 7.7, "mem": 33.3, "uptime": 7316}
@@ -997,11 +1009,9 @@ async def test_bandwidth_port_sensors(
 
     p1rx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_rx")
     assert p1rx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION
-    assert p1rx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC
 
     p1tx_reg_entry = entity_registry.async_get("sensor.mock_name_port_1_tx")
     assert p1tx_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION
-    assert p1tx_reg_entry.entity_category is EntityCategory.DIAGNOSTIC
 
     # Enable entity
     entity_registry.async_update_entity(
@@ -1028,26 +1038,11 @@ async def test_bandwidth_port_sensors(
     assert len(hass.states.async_all()) == 9
     assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 6
 
-    # Verify sensor attributes and state
-    p1rx_sensor = hass.states.get("sensor.mock_name_port_1_rx")
-    assert p1rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE
-    assert p1rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
-    assert p1rx_sensor.state == "0.00921"
-
-    p1tx_sensor = hass.states.get("sensor.mock_name_port_1_tx")
-    assert p1tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE
-    assert p1tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
-    assert p1tx_sensor.state == "0.04089"
-
-    p2rx_sensor = hass.states.get("sensor.mock_name_port_2_rx")
-    assert p2rx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE
-    assert p2rx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
-    assert p2rx_sensor.state == "0.01229"
-
-    p2tx_sensor = hass.states.get("sensor.mock_name_port_2_tx")
-    assert p2tx_sensor.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DATA_RATE
-    assert p2tx_sensor.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
-    assert p2tx_sensor.state == "0.02892"
+    # Verify sensor state
+    assert hass.states.get("sensor.mock_name_port_1_rx").state == "0.00921"
+    assert hass.states.get("sensor.mock_name_port_1_tx").state == "0.04089"
+    assert hass.states.get("sensor.mock_name_port_2_rx").state == "0.01229"
+    assert hass.states.get("sensor.mock_name_port_2_tx").state == "0.02892"
 
     # Verify state update
     device_1 = device_payload[0]
@@ -1141,13 +1136,9 @@ async def test_device_client_sensors(
 
     ent_reg_entry = entity_registry.async_get("sensor.wired_device_clients")
     assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION
-    assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC
-    assert ent_reg_entry.unique_id == "device_clients-01:00:00:00:00:00"
 
     ent_reg_entry = entity_registry.async_get("sensor.wireless_device_clients")
     assert ent_reg_entry.disabled_by == RegistryEntryDisabler.INTEGRATION
-    assert ent_reg_entry.entity_category is EntityCategory.DIAGNOSTIC
-    assert ent_reg_entry.unique_id == "device_clients-02:00:00:00:00:00"
 
     # Enable entity
     entity_registry.async_update_entity(
@@ -1184,72 +1175,6 @@ async def test_device_client_sensors(
     assert hass.states.get("sensor.wireless_device_clients").state == "0"
 
 
-WIRED_CLIENT = {
-    "hostname": "Wired client",
-    "is_wired": True,
-    "mac": "00:00:00:00:00:01",
-    "oui": "Producer",
-    "wired-rx_bytes-r": 1234000000,
-    "wired-tx_bytes-r": 5678000000,
-    "uptime": 1600094505,
-}
-WIRELESS_CLIENT = {
-    "is_wired": False,
-    "mac": "00:00:00:00:00:01",
-    "name": "Wireless client",
-    "oui": "Producer",
-    "rx_bytes-r": 2345000000.0,
-    "tx_bytes-r": 6789000000.0,
-    "uptime": 60,
-}
-
-
-@pytest.mark.parametrize(
-    "config_entry_options",
-    [
-        {
-            CONF_ALLOW_BANDWIDTH_SENSORS: True,
-            CONF_ALLOW_UPTIME_SENSORS: True,
-            CONF_TRACK_CLIENTS: False,
-            CONF_TRACK_DEVICES: False,
-        }
-    ],
-)
-@pytest.mark.parametrize(
-    ("client_payload", "entity_id", "unique_id_prefix"),
-    [
-        ([WIRED_CLIENT], "sensor.wired_client_rx", "rx-"),
-        ([WIRED_CLIENT], "sensor.wired_client_tx", "tx-"),
-        ([WIRED_CLIENT], "sensor.wired_client_uptime", "uptime-"),
-        ([WIRELESS_CLIENT], "sensor.wireless_client_rx", "rx-"),
-        ([WIRELESS_CLIENT], "sensor.wireless_client_tx", "tx-"),
-        ([WIRELESS_CLIENT], "sensor.wireless_client_uptime", "uptime-"),
-    ],
-)
-@pytest.mark.usefixtures("config_entry_setup")
-@pytest.mark.usefixtures("entity_registry_enabled_by_default")
-@pytest.mark.freeze_time("2021-01-01 01:01:00")
-async def test_sensor_sources(
-    hass: HomeAssistant,
-    entity_registry: er.EntityRegistry,
-    snapshot: SnapshotAssertion,
-    entity_id: str,
-    unique_id_prefix: str,
-) -> None:
-    """Test sensor sources and the entity description."""
-    ent_reg_entry = entity_registry.async_get(entity_id)
-    assert ent_reg_entry.unique_id.startswith(unique_id_prefix)
-    assert ent_reg_entry.unique_id == snapshot
-    assert ent_reg_entry.entity_category == snapshot
-
-    state = hass.states.get(entity_id)
-    assert state.attributes.get(ATTR_DEVICE_CLASS) == snapshot
-    assert state.attributes.get(ATTR_FRIENDLY_NAME) == snapshot
-    assert state.attributes.get(ATTR_STATE_CLASS) == snapshot
-    assert state.attributes.get(ATTR_UNIT_OF_MEASUREMENT) == snapshot
-    assert state.state == snapshot
-
-
 async def _test_uptime_entity(
     hass: HomeAssistant,
     freezer: FrozenDateTimeFactory,
@@ -1306,19 +1231,7 @@ async def _test_uptime_entity(
 
 
 @pytest.mark.parametrize("config_entry_options", [{CONF_ALLOW_UPTIME_SENSORS: True}])
-@pytest.mark.parametrize(
-    "client_payload",
-    [
-        [
-            {
-                "mac": "00:00:00:00:00:01",
-                "name": "client1",
-                "oui": "Producer",
-                "uptime": 0,
-            }
-        ]
-    ],
-)
+@pytest.mark.parametrize("client_payload", [[WIRED_CLIENT]])
 @pytest.mark.parametrize(
     ("initial_uptime", "event_uptime", "small_variation_uptime", "new_uptime"),
     [
@@ -1331,7 +1244,6 @@ async def _test_uptime_entity(
 @pytest.mark.usefixtures("entity_registry_enabled_by_default")
 async def test_client_uptime(
     hass: HomeAssistant,
-    entity_registry: er.EntityRegistry,
     freezer: FrozenDateTimeFactory,
     config_entry_options: MappingProxyType[str, Any],
     config_entry_factory: ConfigEntryFactoryType,
@@ -1349,7 +1261,7 @@ async def test_client_uptime(
         mock_websocket_message,
         config_entry_factory,
         payload=client_payload[0],
-        entity_id="sensor.client1_uptime",
+        entity_id="sensor.wired_client_uptime",
message_key=MessageKey.CLIENT, initial_uptime=initial_uptime, event_uptime=event_uptime, @@ -1357,18 +1269,13 @@ async def test_client_uptime( new_uptime=new_uptime, ) - assert ( - entity_registry.async_get("sensor.client1_uptime").entity_category - is EntityCategory.DIAGNOSTIC - ) - # Disable option options = deepcopy(config_entry_options) options[CONF_ALLOW_UPTIME_SENSORS] = False hass.config_entries.async_update_entry(config_entry, options=options) await hass.async_block_till_done() - assert hass.states.get("sensor.client1_uptime") is None + assert hass.states.get("sensor.wired_client_uptime") is None # Enable option options = deepcopy(config_entry_options) @@ -1376,34 +1283,10 @@ async def test_client_uptime( hass.config_entries.async_update_entry(config_entry, options=options) await hass.async_block_till_done() - assert hass.states.get("sensor.client1_uptime") + assert hass.states.get("sensor.wired_client_uptime") -@pytest.mark.parametrize( - "device_payload", - [ - [ - { - "board_rev": 3, - "device_id": "mock-id", - "has_fan": True, - "fan_level": 0, - "ip": "10.0.1.1", - "last_seen": 1562600145, - "mac": "00:00:00:00:01:01", - "model": "US16P150", - "name": "Device", - "next_interval": 20, - "overheating": True, - "state": 1, - "type": "usw", - "upgradable": True, - "uptime": 60, - "version": "4.0.42.10433", - } - ] - ], -) +@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) async def test_device_uptime( hass: HomeAssistant, entity_registry: er.EntityRegistry, @@ -1419,7 +1302,7 @@ async def test_device_uptime( mock_websocket_message, config_entry_factory, payload=device_payload[0], - entity_id="sensor.device_uptime", + entity_id="sensor.mock_name_uptime", message_key=MessageKey.DEVICE, initial_uptime=60, event_uptime=240, @@ -1427,11 +1310,6 @@ async def test_device_uptime( new_uptime=60, ) - assert ( - entity_registry.async_get("sensor.device_uptime").entity_category - is EntityCategory.DIAGNOSTIC - ) - @pytest.mark.parametrize( "device_payload", @@ -1495,7 +1373,7 @@ async def test_device_uptime( ], ) @pytest.mark.parametrize( - ("entity_id", "state", "updated_state", "index_to_update"), + ("monitor_id", "state", "updated_state", "index_to_update"), [ # Microsoft ("microsoft_wan", "56", "20", 0), @@ -1511,24 +1389,22 @@ async def test_wan_monitor_latency( entity_registry: er.EntityRegistry, mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], - entity_id: str, + monitor_id: str, state: str, updated_state: str, index_to_update: int, ) -> None: """Verify that wan latency sensors are working as expected.""" + entity_id = f"sensor.mock_name_{monitor_id}_latency" assert len(hass.states.async_all()) == 6 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2 - latency_entry = entity_registry.async_get(f"sensor.mock_name_{entity_id}_latency") + latency_entry = entity_registry.async_get(entity_id) assert latency_entry.disabled_by == RegistryEntryDisabler.INTEGRATION - assert latency_entry.entity_category is EntityCategory.DIAGNOSTIC # Enable entity - entity_registry.async_update_entity( - entity_id=f"sensor.mock_name_{entity_id}_latency", disabled_by=None - ) + entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None) await hass.async_block_till_done() @@ -1541,13 +1417,8 @@ async def test_wan_monitor_latency( assert len(hass.states.async_all()) == 7 assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3 - # Verify sensor attributes and state - latency_entry = hass.states.get(f"sensor.mock_name_{entity_id}_latency") - 
assert latency_entry.attributes.get(ATTR_DEVICE_CLASS) == SensorDeviceClass.DURATION
-    assert (
-        latency_entry.attributes.get(ATTR_STATE_CLASS) == SensorStateClass.MEASUREMENT
-    )
-    assert latency_entry.state == state
+    # Verify sensor state
+    assert hass.states.get(entity_id).state == state
 
     # Verify state update
     device = device_payload[0]
@@ -1557,9 +1428,7 @@ async def test_wan_monitor_latency(
 
     mock_websocket_message(message=MessageKey.DEVICE, data=device)
 
-    assert (
-        hass.states.get(f"sensor.mock_name_{entity_id}_latency").state == updated_state
-    )
+    assert hass.states.get(entity_id).state == updated_state
 
 
 @pytest.mark.parametrize(
@@ -1655,3 +1524,229 @@ async def test_wan_monitor_latency_with_no_uptime(
     latency_entry = entity_registry.async_get("sensor.mock_name_google_wan_latency")
 
     assert latency_entry is None
+
+
+@pytest.mark.parametrize(
+    "device_payload",
+    [
+        [
+            {
+                "board_rev": 3,
+                "device_id": "mock-id",
+                "has_fan": True,
+                "fan_level": 0,
+                "ip": "10.0.1.1",
+                "last_seen": 1562600145,
+                "mac": "00:00:00:00:01:01",
+                "model": "US16P150",
+                "name": "Device",
+                "next_interval": 20,
+                "overheating": True,
+                "state": 1,
+                "type": "usw",
+                "upgradable": True,
+                "uptime": 60,
+                "version": "4.0.42.10433",
+                "temperatures": [
+                    {"name": "CPU", "type": "cpu", "value": 66.0},
+                    {"name": "Local", "type": "board", "value": 48.75},
+                    {"name": "PHY", "type": "board", "value": 50.25},
+                ],
+            }
+        ]
+    ],
+)
+@pytest.mark.parametrize(
+    ("temperature_id", "state", "updated_state", "index_to_update"),
+    [
+        ("device_cpu", "66.0", "20", 0),
+        ("device_local", "48.75", "90.64", 1),
+        ("device_phy", "50.25", "80", 2),
+    ],
+)
+@pytest.mark.usefixtures("config_entry_setup")
+async def test_device_temperatures(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    mock_websocket_message,
+    device_payload: list[dict[str, Any]],
+    temperature_id: str,
+    state: str,
+    updated_state: str,
+    index_to_update: int,
+) -> None:
+    """Verify that device temperature sensors are working as expected."""
+
+    entity_id = f"sensor.device_{temperature_id}_temperature"
+
+    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2
+
+    temperature_entity = entity_registry.async_get(entity_id)
+    assert temperature_entity.disabled_by == RegistryEntryDisabler.INTEGRATION
+
+    # Enable entity
+    entity_registry.async_update_entity(entity_id=entity_id, disabled_by=None)
+
+    await hass.async_block_till_done()
+
+    async_fire_time_changed(
+        hass,
+        dt_util.utcnow() + timedelta(seconds=RELOAD_AFTER_UPDATE_DELAY + 1),
+    )
+    await hass.async_block_till_done()
+
+    assert len(hass.states.async_all()) == 7
+    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 3
+
+    # Verify sensor state
+    assert hass.states.get(entity_id).state == state
+
+    # Verify state update
+    device = device_payload[0]
+    device["temperatures"][index_to_update]["value"] = updated_state
+
+    mock_websocket_message(message=MessageKey.DEVICE, data=device)
+
+    assert hass.states.get(entity_id).state == updated_state
+
+
+@pytest.mark.parametrize(
+    "device_payload",
+    [
+        [
+            {
+                "board_rev": 2,
+                "device_id": "mock-id",
+                "ip": "10.0.1.1",
+                "mac": "10:00:00:00:01:01",
+                "last_seen": 1562600145,
+                "model": "US16P150",
+                "name": "mock-name",
+                "port_overrides": [],
+                "state": 1,
+                "type": "usw",
+                "version": "4.0.42.10433",
+            }
+        ]
+    ],
+)
+@pytest.mark.usefixtures("config_entry_setup")
+async def test_device_with_no_temperature(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Verify that device temperature sensors are not created if there is no data."""
+
+    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2
+
+    temperature_entity = entity_registry.async_get(
+        "sensor.device_device_cpu_temperature"
+    )
+
+    assert temperature_entity is None
+
+
+@pytest.mark.parametrize(
+    "device_payload",
+    [
+        [
+            {
+                "board_rev": 2,
+                "device_id": "mock-id",
+                "ip": "10.0.1.1",
+                "mac": "10:00:00:00:01:01",
+                "last_seen": 1562600145,
+                "model": "US16P150",
+                "name": "mock-name",
+                "port_overrides": [],
+                "state": 1,
+                "type": "usw",
+                "version": "4.0.42.10433",
+                "temperatures": [
+                    {"name": "MEM", "type": "mem", "value": 66.0},
+                ],
+            }
+        ]
+    ],
+)
+@pytest.mark.usefixtures("config_entry_setup")
+async def test_device_with_no_matching_temperatures(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+) -> None:
+    """Verify that device temperature sensors are not created if there is no matching data."""
+
+    assert len(hass.states.async_all()) == 6
+    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 2
+
+    temperature_entity = entity_registry.async_get(
+        "sensor.device_device_cpu_temperature"
+    )
+
+    assert temperature_entity is None
+
+
+@pytest.mark.parametrize(
+    "device_payload",
+    [
+        [
+            {
+                "board_rev": 3,
+                "device_id": "device-with-uplink",
+                "ip": "10.0.1.1",
+                "last_seen": 1562600145,
+                "mac": "00:00:00:00:01:01",
+                "model": "US16P150",
+                "name": "Device",
+                "next_interval": 20,
+                "state": 1,
+                "type": "usw",
+                "upgradable": True,
+                "uptime": 60,
+                "version": "4.0.42.10433",
+                "uplink": {
+                    "uplink_mac": "00:00:00:00:00:02",
+                    "port_idx": 1,
+                },
+            },
+            {
+                "board_rev": 3,
+                "device_id": "device-without-uplink",
+                "ip": "10.0.1.2",
+                "last_seen": 1562600145,
+                "mac": "00:00:00:00:01:02",
+                "model": "US16P150",
+                "name": "Other Device",
+                "next_interval": 20,
+                "state": 1,
+                "type": "usw",
+                "upgradable": True,
+                "uptime": 60,
+                "version": "4.0.42.10433",
+                "uplink": {},
+            },
+        ],
+    ],
)
+@pytest.mark.usefixtures("config_entry_setup")
+async def test_device_uplink(
+    hass: HomeAssistant,
+    entity_registry: er.EntityRegistry,
+    mock_websocket_message,
+    device_payload: list[dict[str, Any]],
+) -> None:
+    """Verify that uplink sensors are working as expected."""
+    assert len(hass.states.async_entity_ids(SENSOR_DOMAIN)) == 5
+    assert hass.states.get("sensor.device_uplink_mac").state == "00:00:00:00:00:02"
+    assert (
+        entity_registry.async_get("sensor.device_uplink_mac").entity_category
+        is EntityCategory.DIAGNOSTIC
+    )
+
+    # Verify new event changes the uplink MAC
+    device = device_payload[0]
+    device["uplink"]["uplink_mac"] = "00:00:00:00:00:03"
+    mock_websocket_message(message=MessageKey.DEVICE, data=device)
+    assert hass.states.get("sensor.device_uplink_mac").state == "00:00:00:00:00:03"
diff --git a/tests/components/unifi/test_switch.py b/tests/components/unifi/test_switch.py
index 6d85437a244..ef93afa7e3e 100644
--- a/tests/components/unifi/test_switch.py
+++ b/tests/components/unifi/test_switch.py
@@ -3,15 +3,16 @@
 from copy import deepcopy
 from datetime import timedelta
 from typing import Any
+from unittest.mock import patch
 
 from aiounifi.models.message import MessageKey
 import pytest
+from syrupy import SnapshotAssertion
 
 from homeassistant.components.switch import (
     DOMAIN as SWITCH_DOMAIN,
     SERVICE_TURN_OFF,
     SERVICE_TURN_ON,
-    SwitchDeviceClass,
 )
 from homeassistant.components.unifi.const import (
     CONF_BLOCK_CLIENT,
@@ -23,13 +24,12 @@ from homeassistant.components.unifi.const import
( ) from homeassistant.config_entries import RELOAD_AFTER_UPDATE_DELAY from homeassistant.const import ( - ATTR_DEVICE_CLASS, ATTR_ENTITY_ID, CONF_HOST, STATE_OFF, STATE_ON, STATE_UNAVAILABLE, - EntityCategory, + Platform, ) from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er @@ -43,7 +43,7 @@ from .conftest import ( WebsocketStateManager, ) -from tests.common import MockConfigEntry, async_fire_time_changed +from tests.common import MockConfigEntry, async_fire_time_changed, snapshot_platform from tests.test_util.aiohttp import AiohttpClientMocker CLIENT_1 = { @@ -810,6 +810,34 @@ TRAFFIC_RULE = { } +@pytest.mark.parametrize( + "config_entry_options", [{CONF_BLOCK_CLIENT: [BLOCKED["mac"]]}] +) +@pytest.mark.parametrize("client_payload", [[BLOCKED]]) +@pytest.mark.parametrize("device_payload", [[DEVICE_1, OUTLET_UP1, PDU_DEVICE_1]]) +@pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) +@pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) +@pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) +@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) +@pytest.mark.parametrize("wlan_payload", [[WLAN]]) +@pytest.mark.parametrize( + "site_payload", + [[{"desc": "Site name", "name": "site_id", "role": "admin", "_id": "1"}]], +) +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_entity_and_device_data( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + config_entry_factory: ConfigEntryFactoryType, + site_payload: dict[str, Any], + snapshot: SnapshotAssertion, +) -> None: + """Validate entity and device data with and without admin rights.""" + with patch("homeassistant.components.unifi.PLATFORMS", [Platform.SWITCH]): + config_entry = await config_entry_factory() + await snapshot_platform(hass, entity_registry, snapshot, config_entry.entry_id) + + @pytest.mark.parametrize("client_payload", [[CONTROLLER_HOST]]) @pytest.mark.parametrize("device_payload", [[DEVICE_1]]) @pytest.mark.usefixtures("config_entry_setup") @@ -819,18 +847,6 @@ async def test_hub_not_client(hass: HomeAssistant) -> None: assert hass.states.get("switch.cloud_key") is None -@pytest.mark.parametrize("client_payload", [[CLIENT_1]]) -@pytest.mark.parametrize("device_payload", [[DEVICE_1]]) -@pytest.mark.parametrize( - "site_payload", - [[{"desc": "Site name", "name": "site_id", "role": "not admin", "_id": "1"}]], -) -@pytest.mark.usefixtures("config_entry_setup") -async def test_not_admin(hass: HomeAssistant) -> None: - """Test that switch platform only work on an admin account.""" - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 0 - - @pytest.mark.parametrize( "config_entry_options", [ @@ -841,40 +857,17 @@ async def test_not_admin(hass: HomeAssistant) -> None: } ], ) -@pytest.mark.parametrize("client_payload", [[CLIENT_4]]) @pytest.mark.parametrize("clients_all_payload", [[BLOCKED, UNBLOCKED, CLIENT_1]]) @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) async def test_switches( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, config_entry_setup: MockConfigEntry, ) -> None: """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 3 - switch_4 = hass.states.get("switch.poe_client_4") - assert switch_4 is None - - blocked = hass.states.get("switch.block_client_1") - assert blocked is not None - assert blocked.state == "off" - 
- unblocked = hass.states.get("switch.block_client_2") - assert unblocked is not None - assert unblocked.state == "on" - - dpi_switch = hass.states.get("switch.block_media_streaming") - assert dpi_switch is not None - assert dpi_switch.state == "on" - assert dpi_switch.attributes["icon"] == "mdi:network" - - for entry_id in ("switch.block_client_1", "switch.block_media_streaming"): - assert ( - entity_registry.async_get(entry_id).entity_category is EntityCategory.CONFIG - ) - # Block and unblock client aioclient_mock.clear_requests() aioclient_mock.post( @@ -1038,10 +1031,7 @@ async def test_dpi_switches( """Test the update_items function with some clients.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - dpi_switch = hass.states.get("switch.block_media_streaming") - assert dpi_switch is not None - assert dpi_switch.state == STATE_ON - assert dpi_switch.attributes["icon"] == "mdi:network" + assert hass.states.get("switch.block_media_streaming").state == STATE_ON mock_websocket_message(data=DPI_APP_DISABLED_EVENT) await hass.async_block_till_done() @@ -1118,20 +1108,18 @@ async def test_traffic_rules( traffic_rule_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi traffic rules.""" - assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 # Validate state object - switch_1 = hass.states.get("switch.unifi_network_test_traffic_rule") - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + assert hass.states.get("switch.unifi_network_test_traffic_rule").state == STATE_ON traffic_rule = deepcopy(traffic_rule_payload[0]) # Disable traffic rule aioclient_mock.put( f"https://{config_entry_setup.data[CONF_HOST]}:1234" - f"/v2/api/site/{config_entry_setup.data[CONF_SITE_ID]}/trafficrules/{traffic_rule['_id']}", + f"/v2/api/site/{config_entry_setup.data[CONF_SITE_ID]}" + f"/trafficrules/{traffic_rule['_id']}", ) call_count = aioclient_mock.call_count @@ -1188,10 +1176,7 @@ async def test_outlet_switches( assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == expected_switches # Validate state object - switch_1 = hass.states.get(f"switch.{entity_id}") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.OUTLET + assert hass.states.get(f"switch.{entity_id}").state == STATE_ON # Update state object device_1 = deepcopy(device_payload[0]) @@ -1250,15 +1235,6 @@ async def test_outlet_switches( await hass.async_block_till_done() assert hass.states.get(f"switch.{entity_id}").state == STATE_OFF - # Unload config entry - await hass.config_entries.async_unload(config_entry_setup.entry_id) - assert hass.states.get(f"switch.{entity_id}").state == STATE_UNAVAILABLE - - # Remove config entry - await hass.config_entries.async_remove(config_entry_setup.entry_id) - await hass.async_block_till_done() - assert hass.states.get(f"switch.{entity_id}") is None - @pytest.mark.parametrize( "config_entry_options", @@ -1359,8 +1335,8 @@ async def test_poe_port_switches( hass: HomeAssistant, entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message: WebsocketMessageMock, config_entry_setup: MockConfigEntry, + mock_websocket_message: WebsocketMessageMock, device_payload: list[dict[str, Any]], ) -> None: """Test PoE port entities work.""" @@ -1368,7 +1344,6 @@ async def test_poe_port_switches( ent_reg_entry = entity_registry.async_get("switch.mock_name_port_1_poe") assert ent_reg_entry.disabled_by == 
RegistryEntryDisabler.INTEGRATION - assert ent_reg_entry.entity_category is EntityCategory.CONFIG # Enable entity entity_registry.async_update_entity( @@ -1385,10 +1360,7 @@ async def test_poe_port_switches( await hass.async_block_till_done() # Validate state object - switch_1 = hass.states.get("switch.mock_name_port_1_poe") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.OUTLET + assert hass.states.get("switch.mock_name_port_1_poe").state == STATE_ON # Update state object device_1 = deepcopy(device_payload[0]) @@ -1456,24 +1428,16 @@ async def test_poe_port_switches( @pytest.mark.parametrize("wlan_payload", [[WLAN]]) async def test_wlan_switches( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message: WebsocketMessageMock, config_entry_setup: MockConfigEntry, + mock_websocket_message: WebsocketMessageMock, wlan_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi WLAN availability.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - ent_reg_entry = entity_registry.async_get("switch.ssid_1") - assert ent_reg_entry.unique_id == "wlan-012345678910111213141516" - assert ent_reg_entry.entity_category is EntityCategory.CONFIG - # Validate state object - switch_1 = hass.states.get("switch.ssid_1") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + assert hass.states.get("switch.ssid_1").state == STATE_ON # Update state object wlan = deepcopy(wlan_payload[0]) @@ -1512,24 +1476,16 @@ async def test_wlan_switches( @pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) async def test_port_forwarding_switches( hass: HomeAssistant, - entity_registry: er.EntityRegistry, aioclient_mock: AiohttpClientMocker, - mock_websocket_message: WebsocketMessageMock, config_entry_setup: MockConfigEntry, + mock_websocket_message: WebsocketMessageMock, port_forward_payload: list[dict[str, Any]], ) -> None: """Test control of UniFi port forwarding.""" assert len(hass.states.async_entity_ids(SWITCH_DOMAIN)) == 1 - ent_reg_entry = entity_registry.async_get("switch.unifi_network_plex") - assert ent_reg_entry.unique_id == "port_forward-5a32aa4ee4b0412345678911" - assert ent_reg_entry.entity_category is EntityCategory.CONFIG - # Validate state object - switch_1 = hass.states.get("switch.unifi_network_plex") - assert switch_1 is not None - assert switch_1.state == STATE_ON - assert switch_1.attributes.get(ATTR_DEVICE_CLASS) == SwitchDeviceClass.SWITCH + assert hass.states.get("switch.unifi_network_plex").state == STATE_ON # Update state object data = port_forward_payload[0].copy() @@ -1648,6 +1604,7 @@ async def test_updating_unique_id( @pytest.mark.parametrize("dpi_app_payload", [DPI_APPS]) @pytest.mark.parametrize("dpi_group_payload", [DPI_GROUPS]) @pytest.mark.parametrize("port_forward_payload", [[PORT_FORWARD_PLEX]]) +@pytest.mark.parametrize(("traffic_rule_payload"), [([TRAFFIC_RULE])]) @pytest.mark.parametrize("wlan_payload", [[WLAN]]) @pytest.mark.usefixtures("config_entry_setup") @pytest.mark.usefixtures("entity_registry_enabled_by_default") @@ -1661,6 +1618,7 @@ async def test_hub_state_change( "switch.plug_outlet_1", "switch.block_media_streaming", "switch.unifi_network_plex", + "switch.unifi_network_test_traffic_rule", "switch.ssid_1", ) for entity_id in entity_ids: diff --git 
a/tests/components/unifiprotect/test_config_flow.py b/tests/components/unifiprotect/test_config_flow.py index 5d02e1cf098..8bfdc004092 100644 --- a/tests/components/unifiprotect/test_config_flow.py +++ b/tests/components/unifiprotect/test_config_flow.py @@ -224,13 +224,7 @@ async def test_form_reauth_auth( ) mock_config.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": mock_config.entry_id, - }, - ) + result = await mock_config.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert not result["errors"] flows = hass.config_entries.flow.async_progress_by_handler(DOMAIN) diff --git a/tests/components/universal/test_media_player.py b/tests/components/universal/test_media_player.py index 187b62a93a1..7c992814cfe 100644 --- a/tests/components/universal/test_media_player.py +++ b/tests/components/universal/test_media_player.py @@ -55,7 +55,7 @@ def validate_config(config): class MockMediaPlayer(media_player.MediaPlayerEntity): """Mock media player for testing.""" - def __init__(self, hass, name): + def __init__(self, hass: HomeAssistant, name: str) -> None: """Initialize the media player.""" self.hass = hass self._name = name @@ -220,7 +220,7 @@ class MockMediaPlayer(media_player.MediaPlayerEntity): @pytest.fixture -async def mock_states(hass): +async def mock_states(hass: HomeAssistant) -> Mock: """Set mock states used in tests.""" result = Mock() diff --git a/tests/components/upb/test_config_flow.py b/tests/components/upb/test_config_flow.py index efa6d60c344..59a4e97d22b 100644 --- a/tests/components/upb/test_config_flow.py +++ b/tests/components/upb/test_config_flow.py @@ -5,6 +5,7 @@ from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch from homeassistant import config_entries from homeassistant.components.upb.const import DOMAIN +from homeassistant.config_entries import ConfigFlowResult from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -26,7 +27,9 @@ def mocked_upb(sync_complete=True, config_ok=True): ) -async def valid_tcp_flow(hass, sync_complete=True, config_ok=True): +async def valid_tcp_flow( + hass: HomeAssistant, sync_complete: bool = True, config_ok: bool = True +) -> ConfigFlowResult: """Get result dict that are standard for most tests.""" with ( @@ -111,42 +114,3 @@ async def test_form_user_with_already_configured(hass: HomeAssistant) -> None: assert result2["type"] is FlowResultType.ABORT assert result2["reason"] == "already_configured" await hass.async_block_till_done() - - -async def test_form_import(hass: HomeAssistant) -> None: - """Test we get the form with import source.""" - - with ( - mocked_upb(), - patch( - "homeassistant.components.upb.async_setup_entry", return_value=True - ) as mock_setup_entry, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={"host": "tcp://42.4.2.42", "file_path": "upb.upe"}, - ) - await hass.async_block_till_done() - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == "UPB" - - assert result["data"] == {"host": "tcp://42.4.2.42", "file_path": "upb.upe"} - assert len(mock_setup_entry.mock_calls) == 1 - - -async def test_form_junk_input(hass: HomeAssistant) -> None: - """Test we get the form with import source.""" - - with mocked_upb(): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - 
data={"foo": "goo", "goo": "foo"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["errors"] == {"base": "unknown"} - - await hass.async_block_till_done() diff --git a/tests/components/upnp/conftest.py b/tests/components/upnp/conftest.py index 1431ce2c9ef..4bee5c0e589 100644 --- a/tests/components/upnp/conftest.py +++ b/tests/components/upnp/conftest.py @@ -2,10 +2,11 @@ from __future__ import annotations -from collections.abc import Generator +from collections.abc import Callable, Coroutine, Generator import copy from datetime import datetime import socket +from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch from urllib.parse import urlparse @@ -175,7 +176,13 @@ async def ssdp_instant_discovery(): """Instant discovery.""" # Set up device discovery callback. - async def register_callback(hass, callback, match_dict): + async def register_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: """Immediately do callback.""" await callback(TEST_DISCOVERY, ssdp.SsdpChange.ALIVE) return MagicMock() @@ -202,7 +209,13 @@ async def ssdp_instant_discovery_multi_location(): test_discovery.ssdp_all_locations = {TEST_LOCATION6, TEST_LOCATION} # Set up device discovery callback. - async def register_callback(hass, callback, match_dict): + async def register_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: """Immediately do callback.""" await callback(test_discovery, ssdp.SsdpChange.ALIVE) return MagicMock() @@ -225,7 +238,13 @@ async def ssdp_no_discovery(): """No discovery.""" # Set up device discovery callback. - async def register_callback(hass, callback, match_dict): + async def register_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: """Don't do callback.""" return MagicMock() diff --git a/tests/components/upnp/test_init.py b/tests/components/upnp/test_init.py index f87696b0bd1..0e8551dd8a1 100644 --- a/tests/components/upnp/test_init.py +++ b/tests/components/upnp/test_init.py @@ -2,7 +2,9 @@ from __future__ import annotations +from collections.abc import Callable, Coroutine import copy +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from async_upnp_client.profiles.igd import IgdDevice @@ -140,7 +142,13 @@ async def test_async_setup_udn_mismatch( ) # Set up device discovery callback. 
- async def register_callback(hass, callback, match_dict): + async def register_callback( + hass: HomeAssistant, + callback: Callable[ + [ssdp.SsdpServiceInfo, ssdp.SsdpChange], Coroutine[Any, Any, None] | None + ], + match_dict: dict[str, str] | None = None, + ) -> MagicMock: """Immediately do callback.""" await callback(test_discovery, ssdp.SsdpChange.ALIVE) return MagicMock() diff --git a/tests/components/uptime/snapshots/test_sensor.ambr b/tests/components/uptime/snapshots/test_sensor.ambr index fa0cb6bf8a9..561e4b83320 100644 --- a/tests/components/uptime/snapshots/test_sensor.ambr +++ b/tests/components/uptime/snapshots/test_sensor.ambr @@ -71,25 +71,3 @@ 'via_device_id': None, }) # --- -# name: test_uptime_sensor.3 - DeviceRegistryEntrySnapshot({ - 'area_id': None, - 'config_entries': , - 'configuration_url': None, - 'connections': set({ - }), - 'disabled_by': None, - 'entry_type': , - 'hw_version': None, - 'id': , - 'is_new': False, - 'manufacturer': None, - 'model': None, - 'name': 'Uptime', - 'name_by_user': None, - 'serial_number': None, - 'suggested_area': None, - 'sw_version': None, - 'via_device_id': None, - }) -# --- diff --git a/tests/components/uptimerobot/test_config_flow.py b/tests/components/uptimerobot/test_config_flow.py index 1cf0a358a87..3ba5ad696a6 100644 --- a/tests/components/uptimerobot/test_config_flow.py +++ b/tests/components/uptimerobot/test_config_flow.py @@ -168,15 +168,7 @@ async def test_reauthentication( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -209,15 +201,7 @@ async def test_reauthentication_failure( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -253,15 +237,7 @@ async def test_reauthentication_failure_no_existing_entry( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None @@ -294,15 +270,7 @@ async def test_reauthentication_failure_account_not_matching( old_entry = MockConfigEntry(**MOCK_UPTIMEROBOT_CONFIG_ENTRY_DATA) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["errors"] is None diff --git a/tests/components/vacuum/common.py b/tests/components/vacuum/common.py index 0e46ebf5e44..6228c1d2f74 100644 --- a/tests/components/vacuum/common.py 
+++ b/tests/components/vacuum/common.py @@ -4,6 +4,8 @@ All containing methods are legacy helpers that should not be used by new components. Instead call the service directly. """ +from typing import Any + from homeassistant.components.vacuum import ( ATTR_FAN_SPEED, ATTR_PARAMS, @@ -26,136 +28,149 @@ from homeassistant.const import ( SERVICE_TURN_OFF, SERVICE_TURN_ON, ) +from homeassistant.core import HomeAssistant from homeassistant.loader import bind_hass @bind_hass -def turn_on(hass, entity_id=ENTITY_MATCH_ALL): +def turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified vacuum on.""" hass.add_job(async_turn_on, hass, entity_id) -async def async_turn_on(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_on(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified vacuum on.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_ON, data, blocking=True) @bind_hass -def turn_off(hass, entity_id=ENTITY_MATCH_ALL): +def turn_off(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Turn all or specified vacuum off.""" hass.add_job(async_turn_off, hass, entity_id) -async def async_turn_off(hass, entity_id=ENTITY_MATCH_ALL): +async def async_turn_off( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified vacuum off.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TURN_OFF, data, blocking=True) @bind_hass -def toggle(hass, entity_id=ENTITY_MATCH_ALL): +def toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Toggle all or specified vacuum.""" hass.add_job(async_toggle, hass, entity_id) -async def async_toggle(hass, entity_id=ENTITY_MATCH_ALL): +async def async_toggle(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Toggle all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_TOGGLE, data, blocking=True) @bind_hass -def locate(hass, entity_id=ENTITY_MATCH_ALL): +def locate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Locate all or specified vacuum.""" hass.add_job(async_locate, hass, entity_id) -async def async_locate(hass, entity_id=ENTITY_MATCH_ALL): +async def async_locate(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Locate all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_LOCATE, data, blocking=True) @bind_hass -def clean_spot(hass, entity_id=ENTITY_MATCH_ALL): +def clean_spot(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to perform a spot clean-up.""" hass.add_job(async_clean_spot, hass, entity_id) -async def async_clean_spot(hass, entity_id=ENTITY_MATCH_ALL): +async def async_clean_spot( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Tell all or specified vacuum to perform a spot clean-up.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_CLEAN_SPOT, data, blocking=True) @bind_hass -def return_to_base(hass, entity_id=ENTITY_MATCH_ALL): +def return_to_base(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to return to base.""" hass.add_job(async_return_to_base, hass, entity_id) -async def async_return_to_base(hass, 
entity_id=ENTITY_MATCH_ALL): +async def async_return_to_base( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Tell all or specified vacuum to return to base.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_RETURN_TO_BASE, data, blocking=True) @bind_hass -def start_pause(hass, entity_id=ENTITY_MATCH_ALL): +def start_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to start or pause the current task.""" hass.add_job(async_start_pause, hass, entity_id) -async def async_start_pause(hass, entity_id=ENTITY_MATCH_ALL): +async def async_start_pause( + hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Tell all or specified vacuum to start or pause the current task.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_START_PAUSE, data, blocking=True) @bind_hass -def start(hass, entity_id=ENTITY_MATCH_ALL): +def start(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to start or resume the current task.""" hass.add_job(async_start, hass, entity_id) -async def async_start(hass, entity_id=ENTITY_MATCH_ALL): +async def async_start(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or specified vacuum to start or resume the current task.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_START, data, blocking=True) @bind_hass -def pause(hass, entity_id=ENTITY_MATCH_ALL): +def pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or the specified vacuum to pause the current task.""" hass.add_job(async_pause, hass, entity_id) -async def async_pause(hass, entity_id=ENTITY_MATCH_ALL): +async def async_pause(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Tell all or the specified vacuum to pause the current task.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_PAUSE, data, blocking=True) @bind_hass -def stop(hass, entity_id=ENTITY_MATCH_ALL): +def stop(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Stop all or specified vacuum.""" hass.add_job(async_stop, hass, entity_id) -async def async_stop(hass, entity_id=ENTITY_MATCH_ALL): +async def async_stop(hass: HomeAssistant, entity_id: str = ENTITY_MATCH_ALL) -> None: """Stop all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else None await hass.services.async_call(DOMAIN, SERVICE_STOP, data, blocking=True) @bind_hass -def set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL): +def set_fan_speed( + hass: HomeAssistant, fan_speed: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set fan speed for all or specified vacuum.""" hass.add_job(async_set_fan_speed, hass, fan_speed, entity_id) -async def async_set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL): +async def async_set_fan_speed( + hass: HomeAssistant, fan_speed: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set fan speed for all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} data[ATTR_FAN_SPEED] = fan_speed @@ -163,12 +178,22 @@ async def async_set_fan_speed(hass, fan_speed, entity_id=ENTITY_MATCH_ALL): @bind_hass -def send_command(hass, command, params=None, entity_id=ENTITY_MATCH_ALL): +def send_command( + hass: HomeAssistant, + command: str, + params: 
dict[str, Any] | list[Any] | None = None, + entity_id: str = ENTITY_MATCH_ALL, +) -> None: """Send command to all or specified vacuum.""" hass.add_job(async_send_command, hass, command, params, entity_id) -async def async_send_command(hass, command, params=None, entity_id=ENTITY_MATCH_ALL): +async def async_send_command( + hass: HomeAssistant, + command: str, + params: dict[str, Any] | list[Any] | None = None, + entity_id: str = ENTITY_MATCH_ALL, +) -> None: """Send command to all or specified vacuum.""" data = {ATTR_ENTITY_ID: entity_id} if entity_id else {} data[ATTR_COMMAND] = command diff --git a/tests/components/valve/test_init.py b/tests/components/valve/test_init.py index e4519bcef08..378ddb2a94b 100644 --- a/tests/components/valve/test_init.py +++ b/tests/components/valve/test_init.py @@ -332,7 +332,9 @@ async def test_supported_features(hass: HomeAssistant) -> None: assert valve.supported_features is None -def call_service(hass, service, ent, position=None): +def call_service( + hass: HomeAssistant, service: str, ent: ValveEntity, position: int | None = None +): """Call any service on entity.""" params = {ATTR_ENTITY_ID: ent.entity_id} if position is not None: @@ -345,21 +347,21 @@ def set_valve_position(ent, position) -> None: ent._values["current_valve_position"] = position -def is_open(hass, ent): +def is_open(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_OPEN) -def is_opening(hass, ent): +def is_opening(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_OPENING) -def is_closed(hass, ent): +def is_closed(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_CLOSED) -def is_closing(hass, ent): +def is_closing(hass: HomeAssistant, ent: ValveEntity) -> bool: """Return if the valve is closed based on the statemachine.""" return hass.states.is_state(ent.entity_id, STATE_CLOSING) diff --git a/tests/components/velux/test_config_flow.py b/tests/components/velux/test_config_flow.py index 8021ad52810..5f7932d358a 100644 --- a/tests/components/velux/test_config_flow.py +++ b/tests/components/velux/test_config_flow.py @@ -10,7 +10,7 @@ import pytest from pyvlx import PyVLXException from homeassistant.components.velux import DOMAIN -from homeassistant.config_entries import SOURCE_IMPORT, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -69,22 +69,8 @@ async def test_user_errors( assert result["errors"] == {"base": error_name} -async def test_import_valid_config(hass: HomeAssistant) -> None: - """Test import initialized flow with valid config.""" - with patch(PYVLX_CONFIG_FLOW_CLASS_PATH, autospec=True): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=DUMMY_DATA, - ) - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == DUMMY_DATA[CONF_HOST] - assert result["data"] == DUMMY_DATA - - -@pytest.mark.parametrize("flow_source", [SOURCE_IMPORT, SOURCE_USER]) -async def test_flow_duplicate_entry(hass: HomeAssistant, flow_source: str) -> None: - """Test import initialized flow with a duplicate entry.""" +async def 
test_flow_duplicate_entry(hass: HomeAssistant) -> None: + """Test initialized flow with a duplicate entry.""" with patch(PYVLX_CONFIG_FLOW_CLASS_PATH, autospec=True): conf_entry: MockConfigEntry = MockConfigEntry( domain=DOMAIN, title=DUMMY_DATA[CONF_HOST], data=DUMMY_DATA @@ -94,26 +80,8 @@ async def test_flow_duplicate_entry(hass: HomeAssistant, flow_source: str) -> No result = await hass.config_entries.flow.async_init( DOMAIN, - context={"source": flow_source}, + context={"source": SOURCE_USER}, data=DUMMY_DATA, ) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "already_configured" - - -@pytest.mark.parametrize(("error", "error_name"), error_types_to_test) -async def test_import_errors( - hass: HomeAssistant, error: Exception, error_name: str -) -> None: - """Test import initialized flow with exceptions.""" - with patch( - PYVLX_CONFIG_FLOW_CONNECT_FUNCTION_PATH, - side_effect=error, - ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_IMPORT}, - data=DUMMY_DATA, - ) - assert result["type"] is FlowResultType.ABORT - assert result["reason"] == error_name diff --git a/tests/components/venstar/__init__.py b/tests/components/venstar/__init__.py index 116a3be0925..6a40212b793 100644 --- a/tests/components/venstar/__init__.py +++ b/tests/components/venstar/__init__.py @@ -15,7 +15,7 @@ class VenstarColorTouchMock: pin=None, proto="http", SSLCert=False, - ): + ) -> None: """Initialize the Venstar library.""" self.status = {} self.model = "COLORTOUCH" diff --git a/tests/components/venstar/util.py b/tests/components/venstar/util.py index 369d3332135..f1e85e9019e 100644 --- a/tests/components/venstar/util.py +++ b/tests/components/venstar/util.py @@ -15,7 +15,7 @@ TEST_MODELS = ["t2k", "colortouch"] def mock_venstar_devices(f): """Decorate function to mock a Venstar Colortouch and T2000 thermostat API.""" - async def wrapper(hass): + async def wrapper(hass: HomeAssistant) -> None: # Mock thermostats are: # Venstar T2000, FW 4.38 # Venstar "colortouch" T7850, FW 5.1 @@ -37,7 +37,7 @@ def mock_venstar_devices(f): f"http://venstar-{model}.localdomain/query/alerts", text=load_fixture(f"venstar/{model}_alerts.json"), ) - return await f(hass) + await f(hass) return wrapper diff --git a/tests/components/vera/common.py b/tests/components/vera/common.py index 5e0fac6c84a..c5e3a5d4931 100644 --- a/tests/components/vera/common.py +++ b/tests/components/vera/common.py @@ -83,7 +83,7 @@ def new_simple_controller_config( class ComponentFactory: """Factory class.""" - def __init__(self, vera_controller_class_mock): + def __init__(self, vera_controller_class_mock) -> None: """Initialize the factory.""" self.vera_controller_class_mock = vera_controller_class_mock diff --git a/tests/components/verisure/test_config_flow.py b/tests/components/verisure/test_config_flow.py index cf478b093c0..e6dd11669d1 100644 --- a/tests/components/verisure/test_config_flow.py +++ b/tests/components/verisure/test_config_flow.py @@ -352,15 +352,7 @@ async def test_reauth_flow( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("step_id") == "reauth_confirm" assert result.get("type") is FlowResultType.FORM assert 
result.get("errors") == {} @@ -395,15 +387,7 @@ async def test_reauth_flow_with_mfa( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) assert result.get("step_id") == "reauth_confirm" assert result.get("type") is FlowResultType.FORM assert result.get("errors") == {} @@ -466,15 +450,7 @@ async def test_reauth_flow_errors( """Test a reauthentication flow.""" mock_config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_config_entry.unique_id, - "entry_id": mock_config_entry.entry_id, - }, - data=mock_config_entry.data, - ) + result = await mock_config_entry.start_reauth_flow(hass) mock_verisure_config_flow.login.side_effect = side_effect result2 = await hass.config_entries.flow.async_configure( diff --git a/tests/components/vicare/__init__.py b/tests/components/vicare/__init__.py index 329a3b04d58..c2a1ab49e5c 100644 --- a/tests/components/vicare/__init__.py +++ b/tests/components/vicare/__init__.py @@ -6,6 +6,9 @@ from typing import Final from homeassistant.components.vicare.const import CONF_HEATING_TYPE from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME +from homeassistant.core import HomeAssistant + +from tests.common import MockConfigEntry MODULE = "homeassistant.components.vicare" @@ -17,3 +20,11 @@ ENTRY_CONFIG: Final[dict[str, str]] = { } MOCK_MAC = "B874241B7B9" + + +async def setup_integration(hass: HomeAssistant, config_entry: MockConfigEntry) -> None: + """Fixture for setting up the component.""" + config_entry.add_to_hass(hass) + + await hass.config_entries.async_setup(config_entry.entry_id) + await hass.async_block_till_done() diff --git a/tests/components/vicare/conftest.py b/tests/components/vicare/conftest.py index 372314d9fe2..c78669d1c3e 100644 --- a/tests/components/vicare/conftest.py +++ b/tests/components/vicare/conftest.py @@ -13,7 +13,7 @@ from PyViCare.PyViCareService import ViCareDeviceAccessor, readFeature from homeassistant.components.vicare.const import DOMAIN from homeassistant.core import HomeAssistant -from . import ENTRY_CONFIG, MODULE +from . 
import ENTRY_CONFIG, MODULE, setup_integration from tests.common import MockConfigEntry, load_json_object_fixture @@ -40,7 +40,7 @@ class MockPyViCare: ), f"deviceId{idx}", f"model{idx}", - f"online{idx}", + "online", ) ) @@ -87,10 +87,7 @@ async def mock_vicare_gas_boiler( f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures), ): - mock_config_entry.add_to_hass(hass) - - await hass.config_entries.async_setup(mock_config_entry.entry_id) - await hass.async_block_till_done() + await setup_integration(hass, mock_config_entry) yield mock_config_entry diff --git a/tests/components/vicare/fixtures/ViAir300F.json b/tests/components/vicare/fixtures/ViAir300F.json new file mode 100644 index 00000000000..b1ec747e127 --- /dev/null +++ b/tests/components/vicare/fixtures/ViAir300F.json @@ -0,0 +1,882 @@ +{ + "data": [ + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.productIdentification", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "product": { + "type": "object", + "value": { + "busAddress": 1, + "busType": "CanExternal", + "productFamily": "B_00028_VA330", + "viessmannIdentificationNumber": "################" + } + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.productIdentification" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.messages.errors.raw", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "entries": { + "type": "array", + "value": [] + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.messages.errors.raw" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.serial", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "################" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.levels.levelFour", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 234 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelFour" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.levels.levelOne", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 54 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelOne" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.levels.levelThree", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 180 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelThree" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.levels.levelTwo", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 125 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.levels.levelTwo" + }, + { + "apiVersion": 1, + "commands": { + "setMode": { + "isExecutable": true, + "name": "setMode", + "params": { + "mode": { + "constraints": { + "enum": [ + "permanent", + "ventilation", + "sensorOverride", + "sensorDriven" + ] + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.active/commands/setMode" + }, + "setModeContinuousSensorOverride": { + "isExecutable": "true", + "name": "setModeContinuousSensorOverride", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.active/commands/setModeContinuousSensorOverride" + } + }, + "deviceId": "0", + "feature": "ventilation.operating.modes.active", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "permanent" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.active" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.modes.filterChange", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.filterChange" + }, + { + "apiVersion": 1, + "commands": { + "setLevel": { + "isExecutable": true, + "name": "setLevel", + "params": { + "level": { + "constraints": { + "enum": ["levelOne", "levelTwo", "levelThree", "levelFour"] + }, + "required": true, + "type": "string" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.permanent/commands/setLevel" + } + }, + "deviceId": "0", + "feature": "ventilation.operating.modes.permanent", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.permanent" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.modes.sensorDriven", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.sensorDriven" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.modes.sensorOverride", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.sensorOverride" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.modes.ventilation", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.modes.ventilation" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.active", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "levelOne" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.active" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/deactivate" + }, + "setDefaultRuntime": { + "isExecutable": true, + "name": "setDefaultRuntime", + "params": { + "defaultRuntime": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/setDefaultRuntime" + }, + "setTimeout": { + "isExecutable": true, + "name": "setTimeout", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": 
"https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour/commands/setTimeout" + } + }, + "deviceId": "0", + "feature": "ventilation.operating.programs.forcedLevelFour", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "defaultRuntime": { + "type": "number", + "unit": "minutes", + "value": 30 + }, + "isActiveWritable": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.forcedLevelFour" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.levelFour", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 234 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelFour" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.levelOne", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": true + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 54 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelOne" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.levelThree", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 180 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelThree" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.levelTwo", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 125 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.levelTwo" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + "params": 
{}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/deactivate" + }, + "setDefaultRuntime": { + "isExecutable": true, + "name": "setDefaultRuntime", + "params": { + "defaultRuntime": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/setDefaultRuntime" + }, + "setTimeout": { + "isExecutable": true, + "name": "setTimeout", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent/commands/setTimeout" + } + }, + "deviceId": "0", + "feature": "ventilation.operating.programs.silent", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "defaultRuntime": { + "type": "number", + "unit": "minutes", + "value": 30 + }, + "isActiveWritable": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.silent" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.programs.standby", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "volumeFlow": { + "type": "number", + "unit": "cubicMeter/hour", + "value": 0 + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.programs.standby" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "ventilation.operating.state", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "demand": { + "type": "string", + "value": "ventilation" + }, + "level": { + "type": "string", + "value": "levelOne" + }, + "reason": { + "type": "string", + "value": "permanent" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.operating.state" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/deactivate" + }, + "setDefaultRuntime": { + "isExecutable": true, + "name": "setDefaultRuntime", + "params": { + "defaultRuntime": { + 
"constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/setDefaultRuntime" + }, + "setTimeout": { + "isExecutable": true, + "name": "setTimeout", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour/commands/setTimeout" + } + }, + "deviceId": "0", + "feature": "ventilation.quickmodes.forcedLevelFour", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "defaultRuntime": { + "type": "number", + "unit": "minutes", + "value": 30 + }, + "isActiveWritable": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.forcedLevelFour" + }, + { + "apiVersion": 1, + "commands": { + "activate": { + "isExecutable": true, + "name": "activate", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": false, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/activate" + }, + "deactivate": { + "isExecutable": true, + "name": "deactivate", + "params": {}, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/deactivate" + }, + "setDefaultRuntime": { + "isExecutable": true, + "name": "setDefaultRuntime", + "params": { + "defaultRuntime": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/setDefaultRuntime" + }, + "setTimeout": { + "isExecutable": true, + "name": "setTimeout", + "params": { + "timeout": { + "constraints": { + "max": 1440, + "min": 1, + "stepping": 1 + }, + "required": true, + "type": "number" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent/commands/setTimeout" + } + }, + "deviceId": "0", + "feature": "ventilation.quickmodes.silent", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "defaultRuntime": { + "type": "number", + "unit": "minutes", + "value": 30 + }, + "isActiveWritable": { + "type": "boolean", + "value": true + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.quickmodes.silent" + }, + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "heating.boiler.serial", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + 
"value": "################" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/heating.boiler.serial" + }, + { + "apiVersion": 1, + "commands": { + "setSchedule": { + "isExecutable": true, + "name": "setSchedule", + "params": { + "newSchedule": { + "constraints": { + "defaultMode": "levelOne", + "maxEntries": 4, + "modes": ["levelTwo", "levelThree"], + "overlapAllowed": false, + "resolution": 10 + }, + "required": true, + "type": "Schedule" + } + }, + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.schedule/commands/setSchedule" + } + }, + "deviceId": "0", + "feature": "ventilation.schedule", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "active": { + "type": "boolean", + "value": false + }, + "entries": { + "type": "Schedule", + "value": { + "fri": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "mon": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "sat": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "sun": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "thu": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "tue": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ], + "wed": [ + { + "end": "22:00", + "mode": "levelTwo", + "position": 0, + "start": "06:00" + } + ] + } + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/ventilation.schedule" + } + ] +} diff --git a/tests/components/vicare/fixtures/Vitodens300W.json b/tests/components/vicare/fixtures/Vitodens300W.json index 4cf67ebe0f7..bb86bda981b 100644 --- a/tests/components/vicare/fixtures/Vitodens300W.json +++ b/tests/components/vicare/fixtures/Vitodens300W.json @@ -1,5 +1,22 @@ { "data": [ + { + "apiVersion": 1, + "commands": {}, + "deviceId": "0", + "feature": "device.serial", + "gatewayId": "################", + "isEnabled": true, + "isReady": true, + "properties": { + "value": { + "type": "string", + "value": "################" + } + }, + "timestamp": "2024-03-20T01:29:35.549Z", + "uri": "https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial" + }, { "properties": {}, "commands": {}, diff --git a/tests/components/vicare/snapshots/test_binary_sensor.ambr b/tests/components/vicare/snapshots/test_binary_sensor.ambr index 7454f914435..a03a6150c45 100644 --- a/tests/components/vicare/snapshots/test_binary_sensor.ambr +++ b/tests/components/vicare/snapshots/test_binary_sensor.ambr @@ -1,4 +1,378 @@ # serializer version: 1 +# name: test_all_entities[binary_sensor.model0_burner-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_burner', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 
'original_icon': None, + 'original_name': 'Burner', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'burner', + 'unique_id': 'gateway0-burner_active-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_burner-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 Burner', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_burner', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_circulation_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_circulation_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Circulation pump', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'circulation_pump', + 'unique_id': 'gateway0-circulationpump_active-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_circulation_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 Circulation pump', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_circulation_pump', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_circulation_pump_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_circulation_pump_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Circulation pump', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'circulation_pump', + 'unique_id': 'gateway0-circulationpump_active-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_circulation_pump_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 Circulation pump', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_circulation_pump_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_charging-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_dhw_charging', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 
'original_name': 'DHW charging', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'domestic_hot_water_charging', + 'unique_id': 'gateway0-charging_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_charging-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 DHW charging', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_dhw_charging', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_circulation_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_dhw_circulation_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW circulation pump', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'domestic_hot_water_circulation_pump', + 'unique_id': 'gateway0-dhw_circulationpump_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_circulation_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 DHW circulation pump', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_dhw_circulation_pump', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_pump-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_dhw_pump', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW pump', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'domestic_hot_water_pump', + 'unique_id': 'gateway0-dhw_pump_active', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_dhw_pump-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'running', + 'friendly_name': 'model0 DHW pump', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_dhw_pump', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_frost_protection-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_frost_protection', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 
'original_icon': None, + 'original_name': 'Frost protection', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frost_protection', + 'unique_id': 'gateway0-frost_protection_active-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_frost_protection-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Frost protection', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_frost_protection', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[binary_sensor.model0_frost_protection_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'binary_sensor', + 'entity_category': None, + 'entity_id': 'binary_sensor.model0_frost_protection_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Frost protection', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'frost_protection', + 'unique_id': 'gateway0-frost_protection_active-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[binary_sensor.model0_frost_protection_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Frost protection', + }), + 'context': , + 'entity_id': 'binary_sensor.model0_frost_protection_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- # name: test_binary_sensors[burner] StateSnapshot({ 'attributes': ReadOnlyDict({ diff --git a/tests/components/vicare/snapshots/test_button.ambr b/tests/components/vicare/snapshots/test_button.ambr new file mode 100644 index 00000000000..01120b8b0d6 --- /dev/null +++ b/tests/components/vicare/snapshots/test_button.ambr @@ -0,0 +1,47 @@ +# serializer version: 1 +# name: test_all_entities[button.model0_activate_one_time_charge-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': None, + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'button', + 'entity_category': , + 'entity_id': 'button.model0_activate_one_time_charge', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Activate one-time charge', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'activate_onetimecharge', + 'unique_id': 'gateway0-activate_onetimecharge', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[button.model0_activate_one_time_charge-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Activate one-time charge', + }), + 'context': , + 'entity_id': 'button.model0_activate_one_time_charge', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_climate.ambr b/tests/components/vicare/snapshots/test_climate.ambr new file mode 100644 index 00000000000..a01d1c43bea --- /dev/null +++ 
b/tests/components/vicare/snapshots/test_climate.ambr @@ -0,0 +1,167 @@ +# serializer version: 1 +# name: test_all_entities[climate.model0_heating-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + ]), + 'max_temp': 37, + 'min_temp': 3, + 'preset_modes': list([ + 'comfort', + 'eco', + 'home', + 'sleep', + ]), + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.model0_heating', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'heating', + 'unique_id': 'gateway0-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[climate.model0_heating-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'model0 Heating', + 'hvac_action': , + 'hvac_modes': list([ + ]), + 'max_temp': 37, + 'min_temp': 3, + 'preset_mode': None, + 'preset_modes': list([ + 'comfort', + 'eco', + 'home', + 'sleep', + ]), + 'supported_features': , + 'target_temp_step': 1, + 'temperature': None, + 'vicare_programs': list([ + 'comfort', + 'eco', + 'external', + 'holiday', + 'normal', + 'reduced', + 'standby', + ]), + }), + 'context': , + 'entity_id': 'climate.model0_heating', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[climate.model0_heating_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'hvac_modes': list([ + ]), + 'max_temp': 37, + 'min_temp': 3, + 'preset_modes': list([ + 'comfort', + 'eco', + 'home', + 'sleep', + ]), + 'target_temp_step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'climate', + 'entity_category': None, + 'entity_id': 'climate.model0_heating_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'heating', + 'unique_id': 'gateway0-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[climate.model0_heating_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'model0 Heating', + 'hvac_action': , + 'hvac_modes': list([ + ]), + 'max_temp': 37, + 'min_temp': 3, + 'preset_mode': None, + 'preset_modes': list([ + 'comfort', + 'eco', + 'home', + 'sleep', + ]), + 'supported_features': , + 'target_temp_step': 1, + 'temperature': None, + 'vicare_programs': list([ + 'comfort', + 'eco', + 'external', + 'holiday', + 'normal', + 'reduced', + 'standby', + ]), + }), + 'context': , + 'entity_id': 'climate.model0_heating_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_diagnostics.ambr b/tests/components/vicare/snapshots/test_diagnostics.ambr index dfc29d46cc2..430b2de35ad 100644 --- 
a/tests/components/vicare/snapshots/test_diagnostics.ambr +++ b/tests/components/vicare/snapshots/test_diagnostics.ambr @@ -4,6 +4,24 @@ 'data': list([ dict({ 'data': list([ + dict({ + 'apiVersion': 1, + 'commands': dict({ + }), + 'deviceId': '0', + 'feature': 'device.serial', + 'gatewayId': '################', + 'isEnabled': True, + 'isReady': True, + 'properties': dict({ + 'value': dict({ + 'type': 'string', + 'value': '################', + }), + }), + 'timestamp': '2024-03-20T01:29:35.549Z', + 'uri': 'https://api.viessmann.com/iot/v1/features/installations/#######/gateways/################/devices/0/features/device.serial', + }), dict({ 'apiVersion': 1, 'commands': dict({ diff --git a/tests/components/vicare/snapshots/test_fan.ambr b/tests/components/vicare/snapshots/test_fan.ambr new file mode 100644 index 00000000000..48c8d728569 --- /dev/null +++ b/tests/components/vicare/snapshots/test_fan.ambr @@ -0,0 +1,64 @@ +# serializer version: 1 +# name: test_all_entities[fan.model0_ventilation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'preset_modes': list([ + , + , + , + , + ]), + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'fan', + 'entity_category': None, + 'entity_id': 'fan.model0_ventilation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Ventilation', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'ventilation', + 'unique_id': 'gateway0-ventilation', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[fan.model0_ventilation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Ventilation', + 'percentage': 0, + 'percentage_step': 25.0, + 'preset_mode': None, + 'preset_modes': list([ + , + , + , + , + ]), + 'supported_features': , + }), + 'context': , + 'entity_id': 'fan.model0_ventilation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'on', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_number.ambr b/tests/components/vicare/snapshots/test_number.ambr new file mode 100644 index 00000000000..a55c29ab8c1 --- /dev/null +++ b/tests/components/vicare/snapshots/test_number.ambr @@ -0,0 +1,567 @@ +# serializer version: 1 +# name: test_all_entities[number.model0_comfort_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_comfort_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Comfort temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'comfort_temperature', + 'unique_id': 'gateway0-comfort_temperature-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_comfort_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Comfort 
temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_comfort_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_comfort_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_comfort_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Comfort temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'comfort_temperature', + 'unique_id': 'gateway0-comfort_temperature-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_comfort_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Comfort temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_comfort_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_heating_curve_shift-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 40, + 'min': -13, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_heating_curve_shift', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heating curve shift', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_curve_shift', + 'unique_id': 'gateway0-heating curve shift-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_heating_curve_shift-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Heating curve shift', + 'max': 40, + 'min': -13, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_heating_curve_shift', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_heating_curve_shift_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 40, + 'min': -13, + 'mode': , + 'step': 1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_heating_curve_shift_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Heating curve 
shift', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_curve_shift', + 'unique_id': 'gateway0-heating curve shift-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_heating_curve_shift_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Heating curve shift', + 'max': 40, + 'min': -13, + 'mode': , + 'step': 1, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_heating_curve_shift_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_heating_curve_slope-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3.5, + 'min': 0.2, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_heating_curve_slope', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating curve slope', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_curve_slope', + 'unique_id': 'gateway0-heating curve slope-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[number.model0_heating_curve_slope-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating curve slope', + 'max': 3.5, + 'min': 0.2, + 'mode': , + 'step': 0.1, + }), + 'context': , + 'entity_id': 'number.model0_heating_curve_slope', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_heating_curve_slope_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 3.5, + 'min': 0.2, + 'mode': , + 'step': 0.1, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_heating_curve_slope_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating curve slope', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'heating_curve_slope', + 'unique_id': 'gateway0-heating curve slope-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[number.model0_heating_curve_slope_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating curve slope', + 'max': 3.5, + 'min': 0.2, + 'mode': , + 'step': 0.1, + }), + 'context': , + 'entity_id': 'number.model0_heating_curve_slope_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_normal_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 
'entity_category': , + 'entity_id': 'number.model0_normal_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Normal temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'normal_temperature', + 'unique_id': 'gateway0-normal_temperature-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_normal_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Normal temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_normal_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_normal_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_normal_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Normal temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'normal_temperature', + 'unique_id': 'gateway0-normal_temperature-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_normal_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Normal temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_normal_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_reduced_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_reduced_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reduced temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reduced_temperature', + 'unique_id': 'gateway0-reduced_temperature-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_reduced_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Reduced temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_reduced_temperature', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': 'unavailable', + }) +# --- +# name: test_all_entities[number.model0_reduced_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'number', + 'entity_category': , + 'entity_id': 'number.model0_reduced_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Reduced temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'reduced_temperature', + 'unique_id': 'gateway0-reduced_temperature-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[number.model0_reduced_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Reduced temperature', + 'max': 100.0, + 'min': 0.0, + 'mode': , + 'step': 1.0, + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'number.model0_reduced_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unavailable', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_sensor.ambr b/tests/components/vicare/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..7bbac75bedc --- /dev/null +++ b/tests/components/vicare/snapshots/test_sensor.ambr @@ -0,0 +1,1052 @@ +# serializer version: 1 +# name: test_all_entities[sensor.model0_boiler_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_boiler_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Boiler temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'boiler_temperature', + 'unique_id': 'gateway0-boiler_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_boiler_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Boiler temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_boiler_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '63', + }) +# --- +# name: test_all_entities[sensor.model0_burner_hours-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_burner_hours', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Burner hours', + 'platform': 'vicare', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'burner_hours', + 'unique_id': 'gateway0-burner_hours-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_burner_hours-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Burner hours', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_burner_hours', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '18726.3', + }) +# --- +# name: test_all_entities[sensor.model0_burner_modulation-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_burner_modulation', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Burner modulation', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'burner_modulation', + 'unique_id': 'gateway0-burner_modulation-0', + 'unit_of_measurement': '%', + }) +# --- +# name: test_all_entities[sensor.model0_burner_modulation-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Burner modulation', + 'state_class': , + 'unit_of_measurement': '%', + }), + 'context': , + 'entity_id': 'sensor.model0_burner_modulation', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.model0_burner_starts-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': , + 'entity_id': 'sensor.model0_burner_starts', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Burner starts', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'burner_starts', + 'unique_id': 'gateway0-burner_starts-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_burner_starts-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Burner starts', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_burner_starts', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '14315', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DHW gas consumption this month', + 'platform': 'vicare', + 'previous_unique_id': None, + 
'supported_features': 0, + 'translation_key': 'hotwater_gas_consumption_heating_this_month', + 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_month', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 DHW gas consumption this month', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '805', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_week-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_week', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DHW gas consumption this week', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_gas_consumption_heating_this_week', + 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_week', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_week-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 DHW gas consumption this week', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_week', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '84', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DHW gas consumption this year', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_gas_consumption_heating_this_year', + 'unique_id': 'gateway0-hotwater_gas_consumption_heating_this_year', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_this_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 DHW gas consumption this year', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_gas_consumption_this_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '8203', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 
'sensor.model0_dhw_gas_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'DHW gas consumption today', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_gas_consumption_today', + 'unique_id': 'gateway0-hotwater_gas_consumption_today', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_dhw_gas_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 DHW gas consumption today', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_gas_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '22', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_max_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_max_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW max temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_max_temperature', + 'unique_id': 'gateway0-hotwater_max_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_dhw_max_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 DHW max temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_max_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '60', + }) +# --- +# name: test_all_entities[sensor.model0_dhw_min_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_dhw_min_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'DHW min temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'hotwater_min_temperature', + 'unique_id': 'gateway0-hotwater_min_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_dhw_min_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 DHW min temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_dhw_min_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '10', + }) +# --- +# name: test_all_entities[sensor.model0_energy-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 
'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_energy', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power consumption this month', + 'unique_id': 'gateway0-power consumption this month', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_energy-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Energy', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_energy', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '7.843', + }) +# --- +# name: test_all_entities[sensor.model0_energy_consumption_this_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_energy_consumption_this_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy consumption this year', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_consumption_this_year', + 'unique_id': 'gateway0-power consumption this year', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_energy_consumption_this_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Energy consumption this year', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_energy_consumption_this_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '207.106', + }) +# --- +# name: test_all_entities[sensor.model0_energy_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_energy_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Energy consumption today', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_consumption_today', + 'unique_id': 'gateway0-power consumption today', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_energy_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Energy consumption today', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_energy_consumption_today', + 'last_changed': , + 'last_reported': , + 
'last_updated': , + 'state': '0.219', + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_month-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_gas_consumption_this_month', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating gas consumption this month', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gas_consumption_heating_this_month', + 'unique_id': 'gateway0-gas_consumption_heating_this_month', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_month-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating gas consumption this month', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_gas_consumption_this_month', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_week-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_gas_consumption_this_week', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating gas consumption this week', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gas_consumption_heating_this_week', + 'unique_id': 'gateway0-gas_consumption_heating_this_week', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_week-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating gas consumption this week', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_gas_consumption_this_week', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_year-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_gas_consumption_this_year', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating gas consumption this year', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gas_consumption_heating_this_year', + 'unique_id': 'gateway0-gas_consumption_heating_this_year', + 'unit_of_measurement': None, + }) +# 
--- +# name: test_all_entities[sensor.model0_heating_gas_consumption_this_year-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating gas consumption this year', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_gas_consumption_this_year', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '30946', + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_today-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_heating_gas_consumption_today', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Heating gas consumption today', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'gas_consumption_heating_today', + 'unique_id': 'gateway0-gas_consumption_heating_today', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[sensor.model0_heating_gas_consumption_today-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'friendly_name': 'model0 Heating gas consumption today', + 'state_class': , + }), + 'context': , + 'entity_id': 'sensor.model0_heating_gas_consumption_today', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0', + }) +# --- +# name: test_all_entities[sensor.model0_outside_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_outside_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Outside temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'outside_temperature', + 'unique_id': 'gateway0-outside_temperature', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_outside_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Outside temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_outside_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '20.8', + }) +# --- +# name: test_all_entities[sensor.model0_power_consumption_this_week-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_power_consumption_this_week', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Power consumption this week', + 
'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'power_consumption_this_week', + 'unique_id': 'gateway0-power consumption this week', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_power_consumption_this_week-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'energy', + 'friendly_name': 'model0 Power consumption this week', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_power_consumption_this_week', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '0.829', + }) +# --- +# name: test_all_entities[sensor.model0_supply_temperature-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_supply_temperature', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_temperature', + 'unique_id': 'gateway0-supply_temperature-0', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_supply_temperature-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Supply temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_supply_temperature', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '63', + }) +# --- +# name: test_all_entities[sensor.model0_supply_temperature_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'state_class': , + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'sensor', + 'entity_category': None, + 'entity_id': 'sensor.model0_supply_temperature_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': , + 'original_icon': None, + 'original_name': 'Supply temperature', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': 0, + 'translation_key': 'supply_temperature', + 'unique_id': 'gateway0-supply_temperature-1', + 'unit_of_measurement': , + }) +# --- +# name: test_all_entities[sensor.model0_supply_temperature_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'device_class': 'temperature', + 'friendly_name': 'model0 Supply temperature', + 'state_class': , + 'unit_of_measurement': , + }), + 'context': , + 'entity_id': 'sensor.model0_supply_temperature_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': '25.5', + }) +# --- diff --git a/tests/components/vicare/snapshots/test_water_heater.ambr b/tests/components/vicare/snapshots/test_water_heater.ambr new file mode 100644 index 00000000000..5ab4fcc78bd --- /dev/null +++ b/tests/components/vicare/snapshots/test_water_heater.ambr @@ -0,0 +1,113 @@ +# serializer version: 1 +# name: test_all_entities[water_heater.model0_domestic_hot_water-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + 
}), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 60, + 'min_temp': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.model0_domestic_hot_water', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domestic hot water', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'domestic_hot_water', + 'unique_id': 'gateway0-0', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[water_heater.model0_domestic_hot_water-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'model0 Domestic hot water', + 'max_temp': 60, + 'min_temp': 10, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'water_heater.model0_domestic_hot_water', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- +# name: test_all_entities[water_heater.model0_domestic_hot_water_2-entry] + EntityRegistryEntrySnapshot({ + 'aliases': set({ + }), + 'area_id': None, + 'capabilities': dict({ + 'max_temp': 60, + 'min_temp': 10, + }), + 'config_entry_id': , + 'device_class': None, + 'device_id': , + 'disabled_by': None, + 'domain': 'water_heater', + 'entity_category': None, + 'entity_id': 'water_heater.model0_domestic_hot_water_2', + 'has_entity_name': True, + 'hidden_by': None, + 'icon': None, + 'id': , + 'labels': set({ + }), + 'name': None, + 'options': dict({ + }), + 'original_device_class': None, + 'original_icon': None, + 'original_name': 'Domestic hot water', + 'platform': 'vicare', + 'previous_unique_id': None, + 'supported_features': , + 'translation_key': 'domestic_hot_water', + 'unique_id': 'gateway0-1', + 'unit_of_measurement': None, + }) +# --- +# name: test_all_entities[water_heater.model0_domestic_hot_water_2-state] + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'current_temperature': None, + 'friendly_name': 'model0 Domestic hot water', + 'max_temp': 60, + 'min_temp': 10, + 'supported_features': , + 'target_temp_high': None, + 'target_temp_low': None, + 'temperature': None, + }), + 'context': , + 'entity_id': 'water_heater.model0_domestic_hot_water_2', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'unknown', + }) +# --- diff --git a/tests/components/vicare/test_binary_sensor.py b/tests/components/vicare/test_binary_sensor.py index 79ce91642af..b9b8a57a59b 100644 --- a/tests/components/vicare/test_binary_sensor.py +++ b/tests/components/vicare/test_binary_sensor.py @@ -1,11 +1,18 @@ """Test ViCare binary sensors.""" -from unittest.mock import MagicMock +from unittest.mock import MagicMock, patch import pytest from syrupy.assertion import SnapshotAssertion +from homeassistant.const import Platform from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform @pytest.mark.parametrize( @@ -24,3 +31,21 @@ async def test_binary_sensors( ) -> None: """Test the ViCare binary sensor.""" assert hass.states.get(f"binary_sensor.model0_{entity_id}") == snapshot + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.BINARY_SENSOR]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_button.py b/tests/components/vicare/test_button.py new file mode 100644 index 00000000000..c024af41d78 --- /dev/null +++ b/tests/components/vicare/test_button.py @@ -0,0 +1,33 @@ +"""Test ViCare button entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.BUTTON]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_climate.py b/tests/components/vicare/test_climate.py new file mode 100644 index 00000000000..44df87276e7 --- /dev/null +++ b/tests/components/vicare/test_climate.py @@ -0,0 +1,33 @@ +"""Test ViCare climate entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.CLIMATE]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_config_flow.py b/tests/components/vicare/test_config_flow.py index b823bb72dc9..a522cf75d5d 100644 --- a/tests/components/vicare/test_config_flow.py +++ b/tests/components/vicare/test_config_flow.py @@ -11,7 +11,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant.components import dhcp from homeassistant.components.vicare.const import DOMAIN -from homeassistant.config_entries import SOURCE_DHCP, SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_DHCP, SOURCE_USER from homeassistant.const import CONF_CLIENT_ID, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -104,11 +104,7 @@ async def test_step_reauth(hass: HomeAssistant, mock_setup_entry: AsyncMock) -> ) config_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": config_entry.entry_id}, - data=VALID_CONFIG, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/vicare/test_fan.py b/tests/components/vicare/test_fan.py new file mode 100644 index 00000000000..ba5db6e42c7 --- /dev/null +++ b/tests/components/vicare/test_fan.py @@ -0,0 +1,33 @@ +"""Test ViCare fan.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:ventilation"}, "vicare/ViAir300F.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.FAN]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_number.py b/tests/components/vicare/test_number.py new file mode 100644 index 00000000000..c3aa66a86f6 --- /dev/null +++ b/tests/components/vicare/test_number.py @@ -0,0 +1,33 @@ +"""Test ViCare number entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.NUMBER]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_sensor.py b/tests/components/vicare/test_sensor.py new file mode 100644 index 00000000000..624fdf2cd5d --- /dev/null +++ b/tests/components/vicare/test_sensor.py @@ -0,0 +1,33 @@ +"""Test ViCare sensor entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.SENSOR]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vicare/test_types.py b/tests/components/vicare/test_types.py index 575e549f0d9..13d8255cf8d 100644 --- a/tests/components/vicare/test_types.py +++ b/tests/components/vicare/test_types.py @@ -3,7 +3,8 @@ import pytest from homeassistant.components.climate import PRESET_COMFORT, PRESET_SLEEP -from homeassistant.components.vicare.types import HeatingProgram, VentilationMode +from homeassistant.components.vicare.fan import VentilationMode +from homeassistant.components.vicare.types import HeatingProgram @pytest.mark.parametrize( diff --git a/tests/components/vicare/test_water_heater.py b/tests/components/vicare/test_water_heater.py new file mode 100644 index 00000000000..fbb5863cf7a --- /dev/null +++ b/tests/components/vicare/test_water_heater.py @@ -0,0 +1,33 @@ +"""Test ViCare water heater entity.""" + +from unittest.mock import patch + +import pytest +from syrupy.assertion import SnapshotAssertion + +from homeassistant.const import Platform +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + +from . 
import MODULE, setup_integration +from .conftest import Fixture, MockPyViCare + +from tests.common import MockConfigEntry, snapshot_platform + + +@pytest.mark.usefixtures("entity_registry_enabled_by_default") +async def test_all_entities( + hass: HomeAssistant, + snapshot: SnapshotAssertion, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, +) -> None: + """Test all entities.""" + fixtures: list[Fixture] = [Fixture({"type:boiler"}, "vicare/Vitodens300W.json")] + with ( + patch(f"{MODULE}.vicare_login", return_value=MockPyViCare(fixtures)), + patch(f"{MODULE}.PLATFORMS", [Platform.WATER_HEATER]), + ): + await setup_integration(hass, mock_config_entry) + + await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) diff --git a/tests/components/vizio/conftest.py b/tests/components/vizio/conftest.py index f33c7839c72..923509dea2c 100644 --- a/tests/components/vizio/conftest.py +++ b/tests/components/vizio/conftest.py @@ -30,7 +30,7 @@ from .const import ( class MockInput: """Mock Vizio device input.""" - def __init__(self, name): + def __init__(self, name) -> None: """Initialize mock Vizio device input.""" self.meta_name = name self.name = name diff --git a/tests/components/vlc_telnet/test_config_flow.py b/tests/components/vlc_telnet/test_config_flow.py index 54edafab14a..d29a2c06beb 100644 --- a/tests/components/vlc_telnet/test_config_flow.py +++ b/tests/components/vlc_telnet/test_config_flow.py @@ -153,15 +153,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain=DOMAIN, data=entry_data) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry_data, - ) + result = await entry.start_reauth_flow(hass) with ( patch("homeassistant.components.vlc_telnet.config_flow.Client.connect"), @@ -209,15 +201,7 @@ async def test_reauth_errors( entry = MockConfigEntry(domain=DOMAIN, data=entry_data) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "unique_id": entry.unique_id, - }, - data=entry_data, - ) + result = await entry.start_reauth_flow(hass) with ( patch( diff --git a/tests/components/vodafone_station/test_config_flow.py b/tests/components/vodafone_station/test_config_flow.py index 0492d32070f..3a54f250871 100644 --- a/tests/components/vodafone_station/test_config_flow.py +++ b/tests/components/vodafone_station/test_config_flow.py @@ -7,7 +7,7 @@ import pytest from homeassistant.components.device_tracker import CONF_CONSIDER_HOME from homeassistant.components.vodafone_station.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_USERNAME from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -124,6 +124,9 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" with ( patch( @@ -136,15 +139,6 @@ async def test_reauth_successful(hass: HomeAssistant) -> None: 
"homeassistant.components.vodafone_station.async_setup_entry", ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ @@ -172,6 +166,10 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> mock_config = MockConfigEntry(domain=DOMAIN, data=MOCK_USER_DATA) mock_config.add_to_hass(hass) + result = await mock_config.start_reauth_flow(hass) + assert result["type"] is FlowResultType.FORM + assert result["step_id"] == "reauth_confirm" + with ( patch( "homeassistant.components.vodafone_station.config_flow.VodafoneStationSercommApi.login", @@ -184,15 +182,6 @@ async def test_reauth_not_successful(hass: HomeAssistant, side_effect, error) -> "homeassistant.components.vodafone_station.async_setup_entry", ), ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": mock_config.entry_id}, - data=mock_config.data, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "reauth_confirm" - result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={ diff --git a/tests/components/voicerss/test_tts.py b/tests/components/voicerss/test_tts.py index 1a2ad002586..776c0ac153a 100644 --- a/tests/components/voicerss/test_tts.py +++ b/tests/components/voicerss/test_tts.py @@ -36,9 +36,8 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/voip/test_voip.py b/tests/components/voip/test_voip.py index c2978afc17f..aab35bfd029 100644 --- a/tests/components/voip/test_voip.py +++ b/tests/components/voip/test_voip.py @@ -19,9 +19,8 @@ _MEDIA_ID = "12345" @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir def _empty_wav() -> bytes: diff --git a/tests/components/volvooncall/test_config_flow.py b/tests/components/volvooncall/test_config_flow.py index 8bf8bcc7412..5268432c17e 100644 --- a/tests/components/volvooncall/test_config_flow.py +++ b/tests/components/volvooncall/test_config_flow.py @@ -153,13 +153,7 @@ async def test_reauth(hass: HomeAssistant) -> None: ) first_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": first_entry.entry_id, - }, - ) + result = await first_entry.start_reauth_flow(hass) # the first form is just the confirmation prompt assert result["type"] is FlowResultType.FORM diff --git a/tests/components/vulcan/test_config_flow.py b/tests/components/vulcan/test_config_flow.py index 3311f3c71b2..a72e77b32e8 100644 --- a/tests/components/vulcan/test_config_flow.py +++ b/tests/components/vulcan/test_config_flow.py @@ -137,14 +137,13 @@ async def test_config_flow_reauth_success( mock_student.return_value = [ 
Student.load(load_fixture("fake_student_1.json", "vulcan")) ] - MockConfigEntry( + entry = MockConfigEntry( domain=const.DOMAIN, unique_id="0", data={"student_id": "0"}, - ).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -176,14 +175,13 @@ async def test_config_flow_reauth_without_matching_entries( mock_student.return_value = [ Student.load(load_fixture("fake_student_1.json", "vulcan")) ] - MockConfigEntry( + entry = MockConfigEntry( domain=const.DOMAIN, unique_id="0", data={"student_id": "1"}, - ).add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -206,9 +204,13 @@ async def test_config_flow_reauth_with_errors( """Test reauth config flow with errors.""" mock_keystore.return_value = fake_keystore mock_account.return_value = fake_account - result = await hass.config_entries.flow.async_init( - const.DOMAIN, context={"source": config_entries.SOURCE_REAUTH} + entry = MockConfigEntry( + domain=const.DOMAIN, + unique_id="0", + data={"student_id": "0"}, ) + entry.add_to_hass(hass) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" assert result["errors"] == {} diff --git a/tests/components/wallbox/test_config_flow.py b/tests/components/wallbox/test_config_flow.py index c0ff0b19c94..cc38576eb2f 100644 --- a/tests/components/wallbox/test_config_flow.py +++ b/tests/components/wallbox/test_config_flow.py @@ -160,13 +160,7 @@ async def test_form_reauth(hass: HomeAssistant, entry: MockConfigEntry) -> None: status_code=200, ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], @@ -201,13 +195,7 @@ async def test_form_reauth_invalid(hass: HomeAssistant, entry: MockConfigEntry) status_code=200, ) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - }, - ) + result = await entry.start_reauth_flow(hass) result2 = await hass.config_entries.flow.async_configure( result["flow_id"], diff --git a/tests/components/water_heater/common.py b/tests/components/water_heater/common.py index e0a8075f4cc..e2fca153fe6 100644 --- a/tests/components/water_heater/common.py +++ b/tests/components/water_heater/common.py @@ -19,7 +19,9 @@ from homeassistant.const import ATTR_ENTITY_ID, ATTR_TEMPERATURE, ENTITY_MATCH_A from homeassistant.core import HomeAssistant -async def async_set_away_mode(hass, away_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_away_mode( + hass: HomeAssistant, away_mode: bool, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Turn all or specified water_heater devices away mode on.""" data = {ATTR_AWAY_MODE: away_mode} @@ -30,8 +32,11 @@ async def async_set_away_mode(hass, away_mode, entity_id=ENTITY_MATCH_ALL): async def async_set_temperature( - hass, temperature=None, 
entity_id=ENTITY_MATCH_ALL, operation_mode=None -): + hass: HomeAssistant, + temperature: float, + entity_id: str = ENTITY_MATCH_ALL, + operation_mode: str | None = None, +) -> None: """Set new target temperature.""" kwargs = { key: value @@ -48,7 +53,9 @@ async def async_set_temperature( ) -async def async_set_operation_mode(hass, operation_mode, entity_id=ENTITY_MATCH_ALL): +async def async_set_operation_mode( + hass: HomeAssistant, operation_mode: str, entity_id: str = ENTITY_MATCH_ALL +) -> None: """Set new target operation mode.""" data = {ATTR_OPERATION_MODE: operation_mode} diff --git a/tests/components/water_heater/test_init.py b/tests/components/water_heater/test_init.py index f883cf47b19..4e0f860366c 100644 --- a/tests/components/water_heater/test_init.py +++ b/tests/components/water_heater/test_init.py @@ -22,6 +22,7 @@ from homeassistant.config_entries import ConfigEntry from homeassistant.const import UnitOfTemperature from homeassistant.core import HomeAssistant from homeassistant.exceptions import ServiceValidationError +from homeassistant.helpers import config_validation as cv from homeassistant.helpers.entity_platform import AddEntitiesCallback from tests.common import ( @@ -42,7 +43,7 @@ async def test_set_temp_schema_no_req( """Test the set temperature schema with missing required data.""" domain = "climate" service = "test_set_temperature" - schema = SET_TEMPERATURE_SCHEMA + schema = cv.make_entity_service_schema(SET_TEMPERATURE_SCHEMA) calls = async_mock_service(hass, domain, service, schema) data = {"hvac_mode": "off", "entity_id": ["climate.test_id"]} @@ -59,7 +60,7 @@ async def test_set_temp_schema( """Test the set temperature schema with ok required data.""" domain = "water_heater" service = "test_set_temperature" - schema = SET_TEMPERATURE_SCHEMA + schema = cv.make_entity_service_schema(SET_TEMPERATURE_SCHEMA) calls = async_mock_service(hass, domain, service, schema) data = { diff --git a/tests/components/watttime/conftest.py b/tests/components/watttime/conftest.py index 0b7403d45fc..650d07b36a1 100644 --- a/tests/components/watttime/conftest.py +++ b/tests/components/watttime/conftest.py @@ -1,6 +1,7 @@ """Define test fixtures for WattTime.""" -import json +from collections.abc import AsyncGenerator +from typing import Any from unittest.mock import AsyncMock, Mock, patch import pytest @@ -20,13 +21,17 @@ from homeassistant.const import ( CONF_PASSWORD, CONF_USERNAME, ) +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component +from homeassistant.util.json import JsonObjectType -from tests.common import MockConfigEntry, load_fixture +from tests.common import MockConfigEntry, load_json_object_fixture @pytest.fixture(name="client") -def client_fixture(get_grid_region, data_realtime_emissions): +def client_fixture( + get_grid_region: AsyncMock, data_realtime_emissions: JsonObjectType +) -> Mock: """Define an aiowatttime client.""" client = Mock() client.emissions.async_get_grid_region = get_grid_region @@ -37,7 +42,7 @@ def client_fixture(get_grid_region, data_realtime_emissions): @pytest.fixture(name="config_auth") -def config_auth_fixture(hass): +def config_auth_fixture() -> dict[str, Any]: """Define an auth config entry data fixture.""" return { CONF_USERNAME: "user", @@ -46,7 +51,7 @@ def config_auth_fixture(hass): @pytest.fixture(name="config_coordinates") -def config_coordinates_fixture(hass): +def config_coordinates_fixture() -> dict[str, Any]: """Define a coordinates config entry data fixture.""" return { 
CONF_LATITUDE: 32.87336, @@ -55,7 +60,7 @@ def config_coordinates_fixture(hass): @pytest.fixture(name="config_location_type") -def config_location_type_fixture(hass): +def config_location_type_fixture() -> dict[str, Any]: """Define a location type config entry data fixture.""" return { CONF_LOCATION_TYPE: LOCATION_TYPE_COORDINATES, @@ -63,7 +68,9 @@ def config_location_type_fixture(hass): @pytest.fixture(name="config_entry") -def config_entry_fixture(hass, config_auth, config_coordinates): +def config_entry_fixture( + hass: HomeAssistant, config_auth: dict[str, Any], config_coordinates: dict[str, Any] +) -> MockConfigEntry: """Define a config entry fixture.""" entry = MockConfigEntry( domain=DOMAIN, @@ -82,25 +89,30 @@ def config_entry_fixture(hass, config_auth, config_coordinates): @pytest.fixture(name="data_grid_region", scope="package") -def data_grid_region_fixture(): +def data_grid_region_fixture() -> JsonObjectType: """Define grid region data.""" - return json.loads(load_fixture("grid_region_data.json", "watttime")) + return load_json_object_fixture("grid_region_data.json", "watttime") @pytest.fixture(name="data_realtime_emissions", scope="package") -def data_realtime_emissions_fixture(): +def data_realtime_emissions_fixture() -> JsonObjectType: """Define realtime emissions data.""" - return json.loads(load_fixture("realtime_emissions_data.json", "watttime")) + return load_json_object_fixture("realtime_emissions_data.json", "watttime") @pytest.fixture(name="get_grid_region") -def get_grid_region_fixture(data_grid_region): +def get_grid_region_fixture(data_grid_region: JsonObjectType) -> AsyncMock: """Define an aiowatttime method to get grid region data.""" return AsyncMock(return_value=data_grid_region) @pytest.fixture(name="setup_watttime") -async def setup_watttime_fixture(hass, client, config_auth, config_coordinates): +async def setup_watttime_fixture( + hass: HomeAssistant, + client: Mock, + config_auth: dict[str, Any], + config_coordinates: dict[str, Any], +) -> AsyncGenerator[None]: """Define a fixture to set up WattTime.""" with ( patch( diff --git a/tests/components/watttime/test_config_flow.py b/tests/components/watttime/test_config_flow.py index f8eee6b48bf..5087717491f 100644 --- a/tests/components/watttime/test_config_flow.py +++ b/tests/components/watttime/test_config_flow.py @@ -25,6 +25,8 @@ from homeassistant.const import ( from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType +from tests.common import MockConfigEntry + @pytest.mark.parametrize( ("exc", "error"), @@ -144,21 +146,16 @@ async def test_show_form_user(hass: HomeAssistant) -> None: async def test_step_reauth( - hass: HomeAssistant, config_auth, config_coordinates, config_entry, setup_watttime + hass: HomeAssistant, + config_entry: MockConfigEntry, + setup_watttime, ) -> None: """Test a full reauth flow.""" + result = await config_entry.start_reauth_flow(hass) with patch( "homeassistant.components.watttime.async_setup_entry", return_value=True, ): - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data={ - **config_auth, - **config_coordinates, - }, - ) result = await hass.config_entries.flow.async_configure( result["flow_id"], user_input={CONF_PASSWORD: "password"}, diff --git a/tests/components/weatherflow_cloud/conftest.py b/tests/components/weatherflow_cloud/conftest.py index 36b42bf24a8..d83ee082b26 100644 --- a/tests/components/weatherflow_cloud/conftest.py +++ 
b/tests/components/weatherflow_cloud/conftest.py @@ -113,3 +113,39 @@ def mock_api(): mock_api_class.return_value = mock_api yield mock_api + + +# +# @pytest.fixture +# def mock_api_with_lightning_error(): +# """Fixture for Mock WeatherFlowRestAPI.""" +# get_stations_response_data = StationsResponseREST.from_json( +# load_fixture("stations.json", DOMAIN) +# ) +# get_forecast_response_data = WeatherDataForecastREST.from_json( +# load_fixture("forecast.json", DOMAIN) +# ) +# get_observation_response_data = ObservationStationREST.from_json( +# load_fixture("station_observation_error.json", DOMAIN) +# ) +# +# data = { +# 24432: WeatherFlowDataREST( +# weather=get_forecast_response_data, +# observation=get_observation_response_data, +# station=get_stations_response_data.stations[0], +# device_observations=None, +# ) +# } +# +# with patch( +# "homeassistant.components.weatherflow_cloud.coordinator.WeatherFlowRestAPI", +# autospec=True, +# ) as mock_api_class: +# # Create an instance of AsyncMock for the API +# mock_api = AsyncMock() +# mock_api.get_all_data.return_value = data +# # Patch the class to return our mock_api instance +# mock_api_class.return_value = mock_api +# +# yield mock_api diff --git a/tests/components/weatherflow_cloud/fixtures/station_observation_error.json b/tests/components/weatherflow_cloud/fixtures/station_observation_error.json new file mode 100644 index 00000000000..41bb452c911 --- /dev/null +++ b/tests/components/weatherflow_cloud/fixtures/station_observation_error.json @@ -0,0 +1,99 @@ +{ + "elevation": 2063.150146484375, + "is_public": true, + "latitude": 43.94962, + "longitude": -102.86831, + "obs": [ + { + "air_density": 0.96139, + "air_temperature": 10.5, + "barometric_pressure": 782.8, + "brightness": 757, + "delta_t": 8.4, + "dew_point": -10.4, + "feels_like": 10.5, + "heat_index": 10.5, + "lightning_strike_count": 0, + "lightning_strike_count_last_1hr": 0, + "lightning_strike_count_last_3hr": 0, + "lightning_strike_last_distance": 26, + "precip": 0.0, + "precip_accum_last_1hr": 0.0, + "precip_accum_local_day": 0.0, + "precip_accum_local_day_final": 0.0, + "precip_accum_local_yesterday": 0.0, + "precip_accum_local_yesterday_final": 0.0, + "precip_analysis_type_yesterday": 0, + "precip_minutes_local_day": 0, + "precip_minutes_local_yesterday": 0, + "precip_minutes_local_yesterday_final": 0, + "pressure_trend": "steady", + "relative_humidity": 22, + "sea_level_pressure": 1006.2, + "solar_radiation": 6, + "station_pressure": 782.8, + "timestamp": 1708994629, + "uv": 0.03, + "wet_bulb_globe_temperature": 4.6, + "wet_bulb_temperature": 2.1, + "wind_avg": 1.4, + "wind_chill": 10.5, + "wind_direction": 203, + "wind_gust": 3.2, + "wind_lull": 0.3 + } + ], + "outdoor_keys": [ + "timestamp", + "air_temperature", + "barometric_pressure", + "station_pressure", + "pressure_trend", + "sea_level_pressure", + "relative_humidity", + "precip", + "precip_accum_last_1hr", + "precip_accum_local_day", + "precip_accum_local_day_final", + "precip_accum_local_yesterday_final", + "precip_minutes_local_day", + "precip_minutes_local_yesterday_final", + "wind_avg", + "wind_direction", + "wind_gust", + "wind_lull", + "solar_radiation", + "uv", + "brightness", + "lightning_strike_last_epoch", + "lightning_strike_last_distance", + "lightning_strike_count", + "lightning_strike_count_last_1hr", + "lightning_strike_count_last_3hr", + "feels_like", + "heat_index", + "wind_chill", + "dew_point", + "wet_bulb_temperature", + "wet_bulb_globe_temperature", + "delta_t", + "air_density" + ], + 
"public_name": "My Home Station", + "station_id": 24432, + "station_name": "My Home Station", + "station_units": { + "units_direction": "degrees", + "units_distance": "mi", + "units_other": "metric", + "units_precip": "in", + "units_pressure": "hpa", + "units_temp": "f", + "units_wind": "bft" + }, + "status": { + "status_code": 0, + "status_message": "SUCCESS" + }, + "timezone": "America/Denver" +} diff --git a/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr b/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr index f7b635eb4fa..95be86664a2 100644 --- a/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr +++ b/tests/components/weatherflow_cloud/snapshots/test_sensor.ambr @@ -53,122 +53,6 @@ 'state': '0.96139', }) # --- -# name: test_all_entities[sensor.my_home_station_atmospheric_pressure-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_atmospheric_pressure', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Atmospheric pressure', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'barometric_pressure', - 'unique_id': '24432_barometric_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_atmospheric_pressure-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'atmospheric_pressure', - 'friendly_name': 'My Home Station Atmospheric pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_atmospheric_pressure', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '782.8', - }) -# --- -# name: test_all_entities[sensor.my_home_station_atmospheric_pressure_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_atmospheric_pressure_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 3, - }), - 'sensor.private': dict({ - 'suggested_unit_of_measurement': , - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Atmospheric pressure', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'sea_level_pressure', - 'unique_id': '24432_sea_level_pressure', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_atmospheric_pressure_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'atmospheric_pressure', - 'friendly_name': 'My Home Station 
Atmospheric pressure', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_atmospheric_pressure_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '1006.2', - }) -# --- # name: test_all_entities[sensor.my_home_station_dew_point-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -224,58 +108,6 @@ 'state': '-10.4', }) # --- -# name: test_all_entities[sensor.my_home_station_distance-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_distance', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Distance', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_last_distance', - 'unique_id': '24432_lightning_strike_last_distance', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_distance-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'distance', - 'friendly_name': 'My Home Station Distance', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_distance', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '26', - }) -# --- # name: test_all_entities[sensor.my_home_station_feels_like-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -636,210 +468,6 @@ 'state': '2024-02-07T23:01:15+00:00', }) # --- -# name: test_all_entities[sensor.my_home_station_none-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_none', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 5, - }), - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'air_density', - 'unique_id': '24432_air_density', - 'unit_of_measurement': 'kg/m³', - }) -# --- -# name: test_all_entities[sensor.my_home_station_none-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station None', - 'state_class': , - 'unit_of_measurement': 'kg/m³', - }), - 'context': , - 'entity_id': 'sensor.my_home_station_none', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0.96139', - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 
'entity_id': 'sensor.my_home_station_none_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_count', - 'unique_id': '24432_lightning_strike_count', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_none_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_none_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_count_last_1hr', - 'unique_id': '24432_lightning_strike_count_last_1hr', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_none_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_none_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': None, - 'original_icon': None, - 'original_name': None, - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_count_last_3hr', - 'unique_id': '24432_lightning_strike_count_last_3hr', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_none_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'friendly_name': 'My Home Station None', - 'state_class': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_none_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '0', - }) -# --- # name: test_all_entities[sensor.my_home_station_pressure_barometric-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ @@ -1011,384 +639,6 
@@ 'state': '10.5', }) # --- -# name: test_all_entities[sensor.my_home_station_temperature_2-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_2', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'dew_point', - 'unique_id': '24432_dew_point', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_2-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_2', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '-10.4', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_3-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_3', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'feels_like', - 'unique_id': '24432_feels_like', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_3-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_3', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_4-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_4', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 
'translation_key': 'heat_index', - 'unique_id': '24432_heat_index', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_4-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_4', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_5-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_5', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wind_chill', - 'unique_id': '24432_wind_chill', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_5-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_5', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '10.5', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_6-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_6', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wet_bulb_temperature', - 'unique_id': '24432_wet_bulb_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_6-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_6', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2.1', - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_7-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': dict({ - 'state_class': , - }), 
- 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_temperature_7', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - 'sensor': dict({ - 'suggested_display_precision': 1, - }), - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Temperature', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'wet_bulb_globe_temperature', - 'unique_id': '24432_wet_bulb_globe_temperature', - 'unit_of_measurement': , - }) -# --- -# name: test_all_entities[sensor.my_home_station_temperature_7-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'temperature', - 'friendly_name': 'My Home Station Temperature', - 'state_class': , - 'unit_of_measurement': , - }), - 'context': , - 'entity_id': 'sensor.my_home_station_temperature_7', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '4.6', - }) -# --- -# name: test_all_entities[sensor.my_home_station_timestamp-entry] - EntityRegistryEntrySnapshot({ - 'aliases': set({ - }), - 'area_id': None, - 'capabilities': None, - 'config_entry_id': , - 'device_class': None, - 'device_id': , - 'disabled_by': None, - 'domain': 'sensor', - 'entity_category': None, - 'entity_id': 'sensor.my_home_station_timestamp', - 'has_entity_name': True, - 'hidden_by': None, - 'icon': None, - 'id': , - 'labels': set({ - }), - 'name': None, - 'options': dict({ - }), - 'original_device_class': , - 'original_icon': None, - 'original_name': 'Timestamp', - 'platform': 'weatherflow_cloud', - 'previous_unique_id': None, - 'supported_features': 0, - 'translation_key': 'lightning_strike_last_epoch', - 'unique_id': '24432_lightning_strike_last_epoch', - 'unit_of_measurement': None, - }) -# --- -# name: test_all_entities[sensor.my_home_station_timestamp-state] - StateSnapshot({ - 'attributes': ReadOnlyDict({ - 'attribution': 'Weather data delivered by WeatherFlow/Tempest REST Api', - 'device_class': 'timestamp', - 'friendly_name': 'My Home Station Timestamp', - }), - 'context': , - 'entity_id': 'sensor.my_home_station_timestamp', - 'last_changed': , - 'last_reported': , - 'last_updated': , - 'state': '2024-02-07T23:01:15+00:00', - }) -# --- # name: test_all_entities[sensor.my_home_station_wet_bulb_globe_temperature-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/weatherflow_cloud/test_config_flow.py b/tests/components/weatherflow_cloud/test_config_flow.py index 7ade007ceac..9dc5ad1322d 100644 --- a/tests/components/weatherflow_cloud/test_config_flow.py +++ b/tests/components/weatherflow_cloud/test_config_flow.py @@ -4,7 +4,7 @@ import pytest from homeassistant import config_entries from homeassistant.components.weatherflow_cloud.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, ConfigEntryState +from homeassistant.config_entries import ConfigEntryState from homeassistant.const import CONF_API_TOKEN from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType @@ -111,15 +111,14 @@ async def test_reauth(hass: HomeAssistant, mock_get_stations_401_error) -> None: assert not await hass.config_entries.async_setup(entry.entry_id) assert entry.state is ConfigEntryState.SETUP_ERROR - result = await 
hass.config_entries.flow.async_init( - DOMAIN, context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, data=None - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": SOURCE_REAUTH, "entry_id": entry.entry_id}, - data={CONF_API_TOKEN: "SAME_SAME"}, + assert result["step_id"] == "reauth_confirm" + + result = await hass.config_entries.flow.async_configure( + result["flow_id"], user_input={CONF_API_TOKEN: "SAME_SAME"} ) assert result["reason"] == "reauth_successful" assert result["type"] is FlowResultType.ABORT + assert entry.data[CONF_API_TOKEN] == "SAME_SAME" diff --git a/tests/components/weatherflow_cloud/test_sensor.py b/tests/components/weatherflow_cloud/test_sensor.py index 35ce098f5a7..4d6ff0c8c9f 100644 --- a/tests/components/weatherflow_cloud/test_sensor.py +++ b/tests/components/weatherflow_cloud/test_sensor.py @@ -1,16 +1,25 @@ """Tests for the WeatherFlow Cloud sensor platform.""" +from datetime import timedelta from unittest.mock import AsyncMock, patch +from freezegun.api import FrozenDateTimeFactory from syrupy import SnapshotAssertion +from weatherflow4py.models.rest.observation import ObservationStationREST -from homeassistant.const import Platform +from homeassistant.components.weatherflow_cloud import DOMAIN +from homeassistant.const import STATE_UNKNOWN, Platform from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from . import setup_integration -from tests.common import MockConfigEntry, snapshot_platform +from tests.common import ( + MockConfigEntry, + async_fire_time_changed, + load_fixture, + snapshot_platform, +) async def test_all_entities( @@ -27,3 +36,42 @@ async def test_all_entities( await setup_integration(hass, mock_config_entry) await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id) + + +async def test_all_entities_with_lightning_error( + hass: HomeAssistant, + mock_config_entry: MockConfigEntry, + entity_registry: er.EntityRegistry, + mock_api: AsyncMock, + freezer: FrozenDateTimeFactory, +) -> None: + """Test all entities.""" + + get_observation_response_data = ObservationStationREST.from_json( + load_fixture("station_observation_error.json", DOMAIN) + ) + + with patch( + "homeassistant.components.weatherflow_cloud.PLATFORMS", [Platform.SENSOR] + ): + await setup_integration(hass, mock_config_entry) + + assert ( + hass.states.get("sensor.my_home_station_lightning_last_strike").state + == "2024-02-07T23:01:15+00:00" + ) + + # Update the data in our API + all_data = await mock_api.get_all_data() + all_data[24432].observation = get_observation_response_data + mock_api.get_all_data.return_value = all_data + + # Move time forward + freezer.tick(timedelta(minutes=5)) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + hass.states.get("sensor.my_home_station_lightning_last_strike").state + == STATE_UNKNOWN + ) diff --git a/tests/components/weatherkit/snapshots/test_weather.ambr b/tests/components/weatherkit/snapshots/test_weather.ambr index 1fbe5389e98..f6fa2f1514b 100644 --- a/tests/components/weatherkit/snapshots/test_weather.ambr +++ b/tests/components/weatherkit/snapshots/test_weather.ambr @@ -1,294 +1,4 @@ # serializer version: 1 -# name: test_daily_forecast - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 
'temperature': 28.6, - 'templow': 21.2, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-09T15:00:00Z', - 'precipitation': 3.6, - 'precipitation_probability': 45.0, - 'temperature': 30.6, - 'templow': 21.0, - 'uv_index': 6, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-11T15:00:00Z', - 'precipitation': 0.7, - 'precipitation_probability': 47.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 5, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-12T15:00:00Z', - 'precipitation': 7.7, - 'precipitation_probability': 37.0, - 'temperature': 30.4, - 'templow': 22.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-13T15:00:00Z', - 'precipitation': 0.6, - 'precipitation_probability': 45.0, - 'temperature': 31.0, - 'templow': 22.6, - 'uv_index': 6, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'temperature': 31.5, - 'templow': 22.4, - 'uv_index': 7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2023-09-15T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 31.8, - 'templow': 23.3, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-16T15:00:00Z', - 'precipitation': 5.3, - 'precipitation_probability': 35.0, - 'temperature': 30.7, - 'templow': 23.2, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-17T15:00:00Z', - 'precipitation': 2.1, - 'precipitation_probability': 49.0, - 'temperature': 28.1, - 'templow': 22.5, - 'uv_index': 6, - }), - ]), - }) -# --- -# name: test_daily_forecast[forecast] - dict({ - 'weather.home': dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 28.6, - 'templow': 21.2, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-09T15:00:00Z', - 'precipitation': 3.6, - 'precipitation_probability': 45.0, - 'temperature': 30.6, - 'templow': 21.0, - 'uv_index': 6, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-11T15:00:00Z', - 'precipitation': 0.7, - 'precipitation_probability': 47.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 5, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-12T15:00:00Z', - 'precipitation': 7.7, - 'precipitation_probability': 37.0, - 'temperature': 30.4, - 'templow': 22.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-13T15:00:00Z', - 'precipitation': 0.6, - 'precipitation_probability': 45.0, - 'temperature': 31.0, - 'templow': 22.6, - 'uv_index': 6, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'temperature': 31.5, - 'templow': 22.4, - 'uv_index': 7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2023-09-15T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 31.8, - 'templow': 23.3, - 'uv_index': 8, - }), - dict({ - 
'condition': 'lightning', - 'datetime': '2023-09-16T15:00:00Z', - 'precipitation': 5.3, - 'precipitation_probability': 35.0, - 'temperature': 30.7, - 'templow': 23.2, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-17T15:00:00Z', - 'precipitation': 2.1, - 'precipitation_probability': 49.0, - 'temperature': 28.1, - 'templow': 22.5, - 'uv_index': 6, - }), - ]), - }), - }) -# --- -# name: test_daily_forecast[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 28.6, - 'templow': 21.2, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-09T15:00:00Z', - 'precipitation': 3.6, - 'precipitation_probability': 45.0, - 'temperature': 30.6, - 'templow': 21.0, - 'uv_index': 6, - }), - dict({ - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-11T15:00:00Z', - 'precipitation': 0.7, - 'precipitation_probability': 47.0, - 'temperature': 30.4, - 'templow': 23.1, - 'uv_index': 5, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-12T15:00:00Z', - 'precipitation': 7.7, - 'precipitation_probability': 37.0, - 'temperature': 30.4, - 'templow': 22.1, - 'uv_index': 6, - }), - dict({ - 'condition': 'rainy', - 'datetime': '2023-09-13T15:00:00Z', - 'precipitation': 0.6, - 'precipitation_probability': 45.0, - 'temperature': 31.0, - 'templow': 22.6, - 'uv_index': 6, - }), - dict({ - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'temperature': 31.5, - 'templow': 22.4, - 'uv_index': 7, - }), - dict({ - 'condition': 'sunny', - 'datetime': '2023-09-15T15:00:00Z', - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'temperature': 31.8, - 'templow': 23.3, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-16T15:00:00Z', - 'precipitation': 5.3, - 'precipitation_probability': 35.0, - 'temperature': 30.7, - 'templow': 23.2, - 'uv_index': 8, - }), - dict({ - 'condition': 'lightning', - 'datetime': '2023-09-17T15:00:00Z', - 'precipitation': 2.1, - 'precipitation_probability': 49.0, - 'temperature': 28.1, - 'templow': 22.5, - 'uv_index': 6, - }), - ]), - }) -# --- # name: test_daily_forecast[get_forecasts] dict({ 'weather.home': dict({ @@ -387,11978 +97,6 @@ }), }) # --- -# name: test_hourly_forecast - dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T14:00:00Z', - 'dew_point': 21.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 264, - 'wind_gust_speed': 13.44, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 80.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 261, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.64, - }), - dict({ - 'apparent_temperature': 23.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T16:00:00Z', - 'dew_point': 21.1, - 
'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.12, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 252, - 'wind_gust_speed': 11.15, - 'wind_speed': 6.14, - }), - dict({ - 'apparent_temperature': 23.5, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.7, - 'uv_index': 0, - 'wind_bearing': 248, - 'wind_gust_speed': 11.57, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T18:00:00Z', - 'dew_point': 20.8, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.05, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 12.42, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 23.0, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.3, - 'uv_index': 0, - 'wind_bearing': 224, - 'wind_gust_speed': 11.3, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T20:00:00Z', - 'dew_point': 20.4, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.31, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 221, - 'wind_gust_speed': 10.57, - 'wind_speed': 5.13, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T21:00:00Z', - 'dew_point': 20.5, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.55, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 10.63, - 'wind_speed': 5.7, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.79, - 'temperature': 22.8, - 'uv_index': 1, - 'wind_bearing': 258, - 'wind_gust_speed': 10.47, - 'wind_speed': 5.22, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T23:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.95, - 'temperature': 24.0, - 'uv_index': 2, - 'wind_bearing': 282, - 'wind_gust_speed': 12.74, - 'wind_speed': 5.71, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T00:00:00Z', - 'dew_point': 21.5, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.35, - 'temperature': 25.1, - 'uv_index': 3, - 'wind_bearing': 294, - 'wind_gust_speed': 13.87, - 'wind_speed': 6.53, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T01:00:00Z', - 'dew_point': 21.8, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 26.5, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 16.04, - 
'wind_speed': 6.54, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T02:00:00Z', - 'dew_point': 22.0, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.23, - 'temperature': 27.6, - 'uv_index': 6, - 'wind_bearing': 314, - 'wind_gust_speed': 18.1, - 'wind_speed': 7.32, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T03:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.86, - 'temperature': 28.3, - 'uv_index': 6, - 'wind_bearing': 317, - 'wind_gust_speed': 20.77, - 'wind_speed': 9.1, - }), - dict({ - 'apparent_temperature': 31.5, - 'cloud_coverage': 69.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T04:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.65, - 'temperature': 28.6, - 'uv_index': 6, - 'wind_bearing': 311, - 'wind_gust_speed': 21.27, - 'wind_speed': 10.21, - }), - dict({ - 'apparent_temperature': 31.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T05:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.48, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 317, - 'wind_gust_speed': 19.62, - 'wind_speed': 10.53, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.54, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 335, - 'wind_gust_speed': 18.98, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.76, - 'temperature': 27.1, - 'uv_index': 2, - 'wind_bearing': 338, - 'wind_gust_speed': 17.04, - 'wind_speed': 7.75, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.05, - 'temperature': 26.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 14.75, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 344, - 'wind_gust_speed': 10.43, - 'wind_speed': 5.2, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.73, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 6.95, - 'wind_speed': 3.59, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 87, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 326, - 'wind_gust_speed': 5.27, - 'wind_speed': 2.1, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.52, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 5.48, - 'wind_speed': 0.93, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T13:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 188, - 'wind_gust_speed': 4.44, - 'wind_speed': 1.79, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 4.49, - 'wind_speed': 2.19, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.21, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 179, - 'wind_gust_speed': 5.32, - 'wind_speed': 2.65, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 173, - 'wind_gust_speed': 5.81, - 'wind_speed': 3.2, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.88, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 5.53, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.94, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 6.09, - 'wind_speed': 3.36, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T19:00:00Z', - 'dew_point': 20.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.96, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 6.83, - 'wind_speed': 3.71, - }), - dict({ - 'apparent_temperature': 22.5, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T20:00:00Z', - 'dew_point': 20.0, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 21.0, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 7.98, - 
'wind_speed': 4.27, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T21:00:00Z', - 'dew_point': 20.2, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.61, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 8.4, - 'wind_speed': 4.69, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.87, - 'temperature': 23.1, - 'uv_index': 1, - 'wind_bearing': 150, - 'wind_gust_speed': 7.66, - 'wind_speed': 4.33, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 123, - 'wind_gust_speed': 9.63, - 'wind_speed': 3.91, - }), - dict({ - 'apparent_temperature': 30.4, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 105, - 'wind_gust_speed': 12.59, - 'wind_speed': 3.96, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T01:00:00Z', - 'dew_point': 22.9, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.79, - 'temperature': 28.9, - 'uv_index': 5, - 'wind_bearing': 99, - 'wind_gust_speed': 14.17, - 'wind_speed': 4.06, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T02:00:00Z', - 'dew_point': 22.9, - 'humidity': 66, - 'precipitation': 0.3, - 'precipitation_probability': 7.000000000000001, - 'pressure': 1011.29, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 93, - 'wind_gust_speed': 17.75, - 'wind_speed': 4.87, - }), - dict({ - 'apparent_temperature': 34.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T03:00:00Z', - 'dew_point': 23.1, - 'humidity': 64, - 'precipitation': 0.3, - 'precipitation_probability': 11.0, - 'pressure': 1010.78, - 'temperature': 30.6, - 'uv_index': 6, - 'wind_bearing': 78, - 'wind_gust_speed': 17.43, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T04:00:00Z', - 'dew_point': 23.2, - 'humidity': 66, - 'precipitation': 0.4, - 'precipitation_probability': 15.0, - 'pressure': 1010.37, - 'temperature': 30.3, - 'uv_index': 5, - 'wind_bearing': 60, - 'wind_gust_speed': 15.24, - 'wind_speed': 4.9, - }), - dict({ - 'apparent_temperature': 33.7, - 'cloud_coverage': 79.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T05:00:00Z', - 'dew_point': 23.3, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 17.0, - 'pressure': 1010.09, - 'temperature': 30.0, - 'uv_index': 4, - 'wind_bearing': 80, - 'wind_gust_speed': 13.53, - 'wind_speed': 5.98, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T06:00:00Z', - 'dew_point': 23.4, - 
'humidity': 70, - 'precipitation': 1.0, - 'precipitation_probability': 17.0, - 'pressure': 1010.0, - 'temperature': 29.5, - 'uv_index': 3, - 'wind_bearing': 83, - 'wind_gust_speed': 12.55, - 'wind_speed': 6.84, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 88.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 73, - 'precipitation': 0.4, - 'precipitation_probability': 16.0, - 'pressure': 1010.27, - 'temperature': 28.7, - 'uv_index': 2, - 'wind_bearing': 90, - 'wind_gust_speed': 10.16, - 'wind_speed': 6.07, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T08:00:00Z', - 'dew_point': 23.2, - 'humidity': 77, - 'precipitation': 0.5, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.71, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 101, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.82, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 93.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T09:00:00Z', - 'dew_point': 23.2, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.9, - 'temperature': 26.5, - 'uv_index': 0, - 'wind_bearing': 128, - 'wind_gust_speed': 8.89, - 'wind_speed': 4.95, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T10:00:00Z', - 'dew_point': 23.0, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.12, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 134, - 'wind_gust_speed': 10.03, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.43, - 'temperature': 25.1, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 12.4, - 'wind_speed': 5.41, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T12:00:00Z', - 'dew_point': 22.5, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.58, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 16.36, - 'wind_speed': 6.31, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T13:00:00Z', - 'dew_point': 22.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 19.66, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.4, - 'temperature': 24.3, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 21.15, - 'wind_speed': 7.46, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'dew_point': 22.0, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.26, - 'wind_speed': 
7.84, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.01, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 23.53, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T17:00:00Z', - 'dew_point': 21.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.78, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 22.83, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T18:00:00Z', - 'dew_point': 21.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.69, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.7, - 'wind_speed': 8.7, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T19:00:00Z', - 'dew_point': 21.4, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.77, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 24.24, - 'wind_speed': 8.74, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.89, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 23.99, - 'wind_speed': 8.81, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T21:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.1, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 25.55, - 'wind_speed': 9.05, - }), - dict({ - 'apparent_temperature': 27.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 24.6, - 'uv_index': 1, - 'wind_bearing': 140, - 'wind_gust_speed': 29.08, - 'wind_speed': 10.37, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.36, - 'temperature': 25.9, - 'uv_index': 2, - 'wind_bearing': 140, - 'wind_gust_speed': 34.13, - 'wind_speed': 12.56, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T00:00:00Z', - 'dew_point': 22.3, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 27.2, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 38.2, - 'wind_speed': 15.65, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T01:00:00Z', - 'dew_point': 22.3, - 'humidity': 70, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 37.55, - 'wind_speed': 15.78, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 143, - 'wind_gust_speed': 35.86, - 'wind_speed': 15.41, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T03:00:00Z', - 'dew_point': 22.5, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.61, - 'temperature': 30.3, - 'uv_index': 6, - 'wind_bearing': 141, - 'wind_gust_speed': 35.88, - 'wind_speed': 15.51, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T04:00:00Z', - 'dew_point': 22.6, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.36, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 140, - 'wind_gust_speed': 35.99, - 'wind_speed': 15.75, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T05:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.11, - 'temperature': 30.1, - 'uv_index': 4, - 'wind_bearing': 137, - 'wind_gust_speed': 33.61, - 'wind_speed': 15.36, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T06:00:00Z', - 'dew_point': 22.5, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.98, - 'temperature': 30.0, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 32.61, - 'wind_speed': 14.98, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.13, - 'temperature': 29.2, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 28.1, - 'wind_speed': 13.88, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 28.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 24.22, - 'wind_speed': 13.02, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T09:00:00Z', - 'dew_point': 21.9, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.81, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 22.5, - 'wind_speed': 11.94, - }), - dict({ - 'apparent_temperature': 28.8, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T10:00:00Z', - 'dew_point': 21.7, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 
21.47, - 'wind_speed': 11.25, - }), - dict({ - 'apparent_temperature': 28.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.71, - 'wind_speed': 12.39, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.67, - 'wind_speed': 12.83, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T13:00:00Z', - 'dew_point': 21.7, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 23.34, - 'wind_speed': 12.62, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.83, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.9, - 'wind_speed': 12.07, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T15:00:00Z', - 'dew_point': 21.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.74, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.01, - 'wind_speed': 11.19, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T16:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.56, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 21.29, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T17:00:00Z', - 'dew_point': 21.5, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.35, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 20.52, - 'wind_speed': 10.5, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 20.04, - 'wind_speed': 10.51, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T19:00:00Z', - 'dew_point': 21.3, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 12.0, - 'pressure': 1011.37, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 18.07, - 'wind_speed': 10.13, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T20:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 
'precipitation': 0.2, - 'precipitation_probability': 13.0, - 'pressure': 1011.53, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 16.86, - 'wind_speed': 10.34, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T21:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.71, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 16.66, - 'wind_speed': 10.68, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T22:00:00Z', - 'dew_point': 21.9, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 24.4, - 'uv_index': 1, - 'wind_bearing': 137, - 'wind_gust_speed': 17.21, - 'wind_speed': 10.61, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.05, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 19.23, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 29.5, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.07, - 'temperature': 26.6, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 20.61, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 82.0, - 'condition': 'rainy', - 'datetime': '2023-09-12T01:00:00Z', - 'dew_point': 23.1, - 'humidity': 75, - 'precipitation': 0.2, - 'precipitation_probability': 16.0, - 'pressure': 1011.89, - 'temperature': 27.9, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 23.35, - 'wind_speed': 11.98, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 29.0, - 'uv_index': 5, - 'wind_bearing': 143, - 'wind_gust_speed': 26.45, - 'wind_speed': 13.01, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.15, - 'temperature': 29.8, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 28.95, - 'wind_speed': 13.9, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.79, - 'temperature': 30.2, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 27.9, - 'wind_speed': 13.95, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T05:00:00Z', - 'dew_point': 23.1, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.43, - 'temperature': 30.4, - 'uv_index': 4, - 'wind_bearing': 140, - 'wind_gust_speed': 26.53, - 'wind_speed': 13.78, - }), - 
dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T06:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.21, - 'temperature': 30.1, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 24.56, - 'wind_speed': 13.74, - }), - dict({ - 'apparent_temperature': 32.0, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.26, - 'temperature': 29.1, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 22.78, - 'wind_speed': 13.21, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.51, - 'temperature': 28.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 19.92, - 'wind_speed': 12.0, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T09:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.8, - 'temperature': 27.2, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 17.65, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T10:00:00Z', - 'dew_point': 21.4, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 15.87, - 'wind_speed': 10.23, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T11:00:00Z', - 'dew_point': 21.3, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1011.79, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 13.9, - 'wind_speed': 9.39, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T12:00:00Z', - 'dew_point': 21.2, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 47.0, - 'pressure': 1012.12, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.32, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1012.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.18, - 'wind_speed': 8.59, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T14:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.09, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.84, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T15:00:00Z', - 'dew_point': 
21.3, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.99, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.93, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T16:00:00Z', - 'dew_point': 21.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 16.74, - 'wind_speed': 9.49, - }), - dict({ - 'apparent_temperature': 24.7, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T17:00:00Z', - 'dew_point': 20.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.75, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 17.45, - 'wind_speed': 9.12, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.04, - 'wind_speed': 8.68, - }), - dict({ - 'apparent_temperature': 24.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 16.8, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T20:00:00Z', - 'dew_point': 20.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.23, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.35, - 'wind_speed': 8.36, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T21:00:00Z', - 'dew_point': 20.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.49, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 14.09, - 'wind_speed': 7.77, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T22:00:00Z', - 'dew_point': 21.0, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.72, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 152, - 'wind_gust_speed': 14.04, - 'wind_speed': 7.25, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T23:00:00Z', - 'dew_point': 21.4, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 25.5, - 'uv_index': 2, - 'wind_bearing': 149, - 'wind_gust_speed': 15.31, - 'wind_speed': 7.14, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-13T00:00:00Z', - 'dew_point': 21.8, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 27.1, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 16.42, - 
'wind_speed': 6.89, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T01:00:00Z', - 'dew_point': 22.0, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.65, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 137, - 'wind_gust_speed': 18.64, - 'wind_speed': 6.65, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T02:00:00Z', - 'dew_point': 21.9, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.26, - 'temperature': 29.4, - 'uv_index': 5, - 'wind_bearing': 128, - 'wind_gust_speed': 21.69, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 33.0, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T03:00:00Z', - 'dew_point': 21.9, - 'humidity': 62, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.88, - 'temperature': 30.1, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 23.41, - 'wind_speed': 7.33, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T04:00:00Z', - 'dew_point': 22.0, - 'humidity': 61, - 'precipitation': 0.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.55, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 56, - 'wind_gust_speed': 23.1, - 'wind_speed': 8.09, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 61, - 'precipitation': 1.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.29, - 'temperature': 30.2, - 'uv_index': 4, - 'wind_bearing': 20, - 'wind_gust_speed': 21.81, - 'wind_speed': 9.46, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T06:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 2.3, - 'precipitation_probability': 11.0, - 'pressure': 1011.17, - 'temperature': 29.7, - 'uv_index': 3, - 'wind_bearing': 20, - 'wind_gust_speed': 19.72, - 'wind_speed': 9.8, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 69.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T07:00:00Z', - 'dew_point': 22.4, - 'humidity': 68, - 'precipitation': 1.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.32, - 'temperature': 28.8, - 'uv_index': 1, - 'wind_bearing': 18, - 'wind_gust_speed': 17.55, - 'wind_speed': 9.23, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T08:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.6, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 27, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.05, - }), - dict({ - 'apparent_temperature': 29.4, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T09:00:00Z', - 'dew_point': 23.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 32, - 'wind_gust_speed': 12.17, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T10:00:00Z', - 'dew_point': 22.9, - 'humidity': 85, - 'precipitation': 0.0, - 
'precipitation_probability': 0.0, - 'pressure': 1012.3, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 69, - 'wind_gust_speed': 11.64, - 'wind_speed': 6.69, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.71, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.23, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.96, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.47, - 'wind_speed': 5.73, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T13:00:00Z', - 'dew_point': 22.3, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.03, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 13.57, - 'wind_speed': 5.66, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.99, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 15.07, - 'wind_speed': 5.83, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T15:00:00Z', - 'dew_point': 22.2, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.95, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 16.06, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T16:00:00Z', - 'dew_point': 22.0, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.9, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 16.05, - 'wind_speed': 5.75, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T17:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.52, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T18:00:00Z', - 'dew_point': 21.8, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.87, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.01, - 'wind_speed': 5.32, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 22.8, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.39, - 'wind_speed': 5.33, - }), - dict({ - 'apparent_temperature': 24.8, - 
'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.22, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.79, - 'wind_speed': 5.43, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.12, - 'wind_speed': 5.52, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T22:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.59, - 'temperature': 24.3, - 'uv_index': 1, - 'wind_bearing': 147, - 'wind_gust_speed': 16.14, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T23:00:00Z', - 'dew_point': 22.4, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.74, - 'temperature': 25.7, - 'uv_index': 2, - 'wind_bearing': 146, - 'wind_gust_speed': 19.09, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.78, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 143, - 'wind_gust_speed': 21.6, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T01:00:00Z', - 'dew_point': 23.2, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.61, - 'temperature': 28.7, - 'uv_index': 5, - 'wind_bearing': 138, - 'wind_gust_speed': 23.36, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T02:00:00Z', - 'dew_point': 23.2, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.32, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 24.72, - 'wind_speed': 4.99, - }), - dict({ - 'apparent_temperature': 34.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T03:00:00Z', - 'dew_point': 23.3, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.04, - 'temperature': 30.7, - 'uv_index': 6, - 'wind_bearing': 354, - 'wind_gust_speed': 25.23, - 'wind_speed': 4.74, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.77, - 'temperature': 31.0, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 24.6, - 'wind_speed': 4.79, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 60.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T05:00:00Z', - 'dew_point': 23.2, - 'humidity': 64, - 'precipitation': 0.2, 
- 'precipitation_probability': 15.0, - 'pressure': 1012.53, - 'temperature': 30.7, - 'uv_index': 5, - 'wind_bearing': 336, - 'wind_gust_speed': 23.28, - 'wind_speed': 5.07, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 59.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T06:00:00Z', - 'dew_point': 23.1, - 'humidity': 66, - 'precipitation': 0.2, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1012.49, - 'temperature': 30.2, - 'uv_index': 3, - 'wind_bearing': 336, - 'wind_gust_speed': 22.05, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 32.9, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T07:00:00Z', - 'dew_point': 23.0, - 'humidity': 68, - 'precipitation': 0.2, - 'precipitation_probability': 40.0, - 'pressure': 1012.73, - 'temperature': 29.5, - 'uv_index': 2, - 'wind_bearing': 339, - 'wind_gust_speed': 21.18, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T08:00:00Z', - 'dew_point': 22.8, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 45.0, - 'pressure': 1013.16, - 'temperature': 28.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 20.35, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T09:00:00Z', - 'dew_point': 22.5, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1013.62, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 347, - 'wind_gust_speed': 19.42, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T10:00:00Z', - 'dew_point': 22.4, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.09, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 348, - 'wind_gust_speed': 18.19, - 'wind_speed': 5.31, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T11:00:00Z', - 'dew_point': 22.4, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.56, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 177, - 'wind_gust_speed': 16.79, - 'wind_speed': 4.28, - }), - dict({ - 'apparent_temperature': 27.5, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.87, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 15.61, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T13:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.91, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 14.7, - 'wind_speed': 4.11, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T14:00:00Z', - 'dew_point': 21.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.8, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 13.81, - 'wind_speed': 4.97, - }), - dict({ - 
'apparent_temperature': 25.3, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T15:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.66, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 170, - 'wind_gust_speed': 12.88, - 'wind_speed': 5.57, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T16:00:00Z', - 'dew_point': 21.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.54, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 12.0, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T17:00:00Z', - 'dew_point': 21.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.45, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 11.43, - 'wind_speed': 5.48, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 44.0, - 'pressure': 1014.45, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 11.42, - 'wind_speed': 5.38, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T19:00:00Z', - 'dew_point': 21.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'pressure': 1014.63, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.15, - 'wind_speed': 5.39, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T20:00:00Z', - 'dew_point': 21.8, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 51.0, - 'pressure': 1014.91, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 13.54, - 'wind_speed': 5.45, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T21:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 42.0, - 'pressure': 1015.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 15.48, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T22:00:00Z', - 'dew_point': 22.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 28.999999999999996, - 'pressure': 1015.4, - 'temperature': 25.7, - 'uv_index': 1, - 'wind_bearing': 158, - 'wind_gust_speed': 17.86, - 'wind_speed': 5.84, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 77, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.54, - 'temperature': 27.2, - 'uv_index': 2, - 'wind_bearing': 155, - 'wind_gust_speed': 20.19, - 'wind_speed': 6.09, - }), - dict({ - 'apparent_temperature': 32.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T00:00:00Z', - 'dew_point': 23.3, - 'humidity': 73, - 'precipitation': 
0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.55, - 'temperature': 28.6, - 'uv_index': 4, - 'wind_bearing': 152, - 'wind_gust_speed': 21.83, - 'wind_speed': 6.42, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T01:00:00Z', - 'dew_point': 23.5, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.35, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 144, - 'wind_gust_speed': 22.56, - 'wind_speed': 6.91, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.0, - 'temperature': 30.4, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.83, - 'wind_speed': 7.47, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.62, - 'temperature': 30.9, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.98, - 'wind_speed': 7.95, - }), - dict({ - 'apparent_temperature': 35.4, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T04:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 31.3, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 23.21, - 'wind_speed': 8.44, - }), - dict({ - 'apparent_temperature': 35.6, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T05:00:00Z', - 'dew_point': 23.7, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.95, - 'temperature': 31.5, - 'uv_index': 5, - 'wind_bearing': 344, - 'wind_gust_speed': 23.46, - 'wind_speed': 8.95, - }), - dict({ - 'apparent_temperature': 35.1, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T06:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.83, - 'temperature': 31.1, - 'uv_index': 3, - 'wind_bearing': 347, - 'wind_gust_speed': 23.64, - 'wind_speed': 9.13, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.96, - 'temperature': 30.3, - 'uv_index': 2, - 'wind_bearing': 350, - 'wind_gust_speed': 23.66, - 'wind_speed': 8.78, - }), - dict({ - 'apparent_temperature': 32.4, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T08:00:00Z', - 'dew_point': 23.1, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 29.0, - 'uv_index': 0, - 'wind_bearing': 356, - 'wind_gust_speed': 23.51, - 'wind_speed': 8.13, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T09:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.61, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 3, - 'wind_gust_speed': 23.21, - 'wind_speed': 
7.48, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T10:00:00Z', - 'dew_point': 22.8, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.02, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 20, - 'wind_gust_speed': 22.68, - 'wind_speed': 6.83, - }), - dict({ - 'apparent_temperature': 29.2, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.43, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 129, - 'wind_gust_speed': 22.04, - 'wind_speed': 6.1, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T12:00:00Z', - 'dew_point': 22.7, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.71, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.64, - 'wind_speed': 5.6, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T13:00:00Z', - 'dew_point': 23.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.52, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 16.35, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T14:00:00Z', - 'dew_point': 22.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.37, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 17.11, - 'wind_speed': 5.79, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.21, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 17.32, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 16.6, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T17:00:00Z', - 'dew_point': 22.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.95, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 219, - 'wind_gust_speed': 15.52, - 'wind_speed': 4.62, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T18:00:00Z', - 'dew_point': 22.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.88, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 216, - 'wind_gust_speed': 14.64, - 'wind_speed': 4.32, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T19:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 
0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.91, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 198, - 'wind_gust_speed': 14.06, - 'wind_speed': 4.73, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T20:00:00Z', - 'dew_point': 22.4, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.99, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 189, - 'wind_gust_speed': 13.7, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T21:00:00Z', - 'dew_point': 22.5, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 13.77, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.12, - 'temperature': 25.5, - 'uv_index': 1, - 'wind_bearing': 179, - 'wind_gust_speed': 14.38, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 52.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.13, - 'temperature': 26.9, - 'uv_index': 2, - 'wind_bearing': 170, - 'wind_gust_speed': 15.2, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.04, - 'temperature': 28.0, - 'uv_index': 4, - 'wind_bearing': 155, - 'wind_gust_speed': 15.85, - 'wind_speed': 4.76, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 24.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T01:00:00Z', - 'dew_point': 22.6, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.52, - 'temperature': 29.2, - 'uv_index': 6, - 'wind_bearing': 110, - 'wind_gust_speed': 16.27, - 'wind_speed': 6.81, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 16.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.01, - 'temperature': 30.2, - 'uv_index': 8, - 'wind_bearing': 30, - 'wind_gust_speed': 16.55, - 'wind_speed': 6.86, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T03:00:00Z', - 'dew_point': 22.0, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.45, - 'temperature': 31.1, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.52, - 'wind_speed': 6.8, - }), - dict({ - 'apparent_temperature': 34.7, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T04:00:00Z', - 'dew_point': 21.9, - 'humidity': 57, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 31.5, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.08, - 'wind_speed': 6.62, - }), - dict({ - 
'apparent_temperature': 34.9, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.39, - 'temperature': 31.8, - 'uv_index': 6, - 'wind_bearing': 20, - 'wind_gust_speed': 15.48, - 'wind_speed': 6.45, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T06:00:00Z', - 'dew_point': 21.7, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.11, - 'temperature': 31.4, - 'uv_index': 4, - 'wind_bearing': 26, - 'wind_gust_speed': 15.08, - 'wind_speed': 6.43, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 7.000000000000001, - 'condition': 'sunny', - 'datetime': '2023-09-16T07:00:00Z', - 'dew_point': 21.7, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.15, - 'temperature': 30.7, - 'uv_index': 2, - 'wind_bearing': 39, - 'wind_gust_speed': 14.88, - 'wind_speed': 6.61, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.41, - 'temperature': 29.6, - 'uv_index': 0, - 'wind_bearing': 72, - 'wind_gust_speed': 14.82, - 'wind_speed': 6.95, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T09:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.75, - 'temperature': 28.5, - 'uv_index': 0, - 'wind_bearing': 116, - 'wind_gust_speed': 15.13, - 'wind_speed': 7.45, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 13.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T10:00:00Z', - 'dew_point': 22.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.13, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 16.09, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.47, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.37, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 29.3, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T12:00:00Z', - 'dew_point': 22.9, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.6, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 18.29, - 'wind_speed': 9.21, - }), - dict({ - 'apparent_temperature': 28.7, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T13:00:00Z', - 'dew_point': 23.0, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 25.7, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 18.49, - 'wind_speed': 8.96, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T14:00:00Z', - 'dew_point': 22.8, - 'humidity': 88, - 'precipitation': 0.0, - 
'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.47, - 'wind_speed': 8.45, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.79, - 'wind_speed': 8.1, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.1, - 'temperature': 24.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 19.81, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T17:00:00Z', - 'dew_point': 22.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.68, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 20.96, - 'wind_speed': 8.3, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T18:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.41, - 'wind_speed': 8.24, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T19:00:00Z', - 'dew_point': 22.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 20.42, - 'wind_speed': 7.62, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T20:00:00Z', - 'dew_point': 22.6, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 18.61, - 'wind_speed': 6.66, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T21:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 17.14, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 26.0, - 'uv_index': 1, - 'wind_bearing': 161, - 'wind_gust_speed': 16.78, - 'wind_speed': 5.5, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.51, - 'temperature': 27.5, - 'uv_index': 2, - 'wind_bearing': 165, - 'wind_gust_speed': 17.21, - 'wind_speed': 5.56, - }), - 
dict({ - 'apparent_temperature': 31.7, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T00:00:00Z', - 'dew_point': 22.8, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 28.5, - 'uv_index': 4, - 'wind_bearing': 174, - 'wind_gust_speed': 17.96, - 'wind_speed': 6.04, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T01:00:00Z', - 'dew_point': 22.7, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.4, - 'uv_index': 6, - 'wind_bearing': 192, - 'wind_gust_speed': 19.15, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 28.999999999999996, - 'condition': 'sunny', - 'datetime': '2023-09-17T02:00:00Z', - 'dew_point': 22.8, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 30.1, - 'uv_index': 7, - 'wind_bearing': 225, - 'wind_gust_speed': 20.89, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T03:00:00Z', - 'dew_point': 22.8, - 'humidity': 63, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1009.75, - 'temperature': 30.7, - 'uv_index': 8, - 'wind_bearing': 264, - 'wind_gust_speed': 22.67, - 'wind_speed': 10.27, - }), - dict({ - 'apparent_temperature': 33.9, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T04:00:00Z', - 'dew_point': 22.5, - 'humidity': 62, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1009.18, - 'temperature': 30.5, - 'uv_index': 7, - 'wind_bearing': 293, - 'wind_gust_speed': 23.93, - 'wind_speed': 10.82, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T05:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.6, - 'precipitation_probability': 12.0, - 'pressure': 1008.71, - 'temperature': 30.1, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 24.39, - 'wind_speed': 10.72, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 64, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.46, - 'temperature': 29.6, - 'uv_index': 3, - 'wind_bearing': 312, - 'wind_gust_speed': 23.9, - 'wind_speed': 10.28, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 47.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.53, - 'temperature': 28.9, - 'uv_index': 1, - 'wind_bearing': 312, - 'wind_gust_speed': 22.3, - 'wind_speed': 9.59, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 70, - 'precipitation': 0.6, - 'precipitation_probability': 15.0, - 'pressure': 1008.82, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 19.73, - 'wind_speed': 8.58, - }), - dict({ - 'apparent_temperature': 29.6, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T09:00:00Z', - 'dew_point': 22.0, 
- 'humidity': 74, - 'precipitation': 0.5, - 'precipitation_probability': 15.0, - 'pressure': 1009.21, - 'temperature': 27.0, - 'uv_index': 0, - 'wind_bearing': 291, - 'wind_gust_speed': 16.49, - 'wind_speed': 7.34, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 78, - 'precipitation': 0.4, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1009.65, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 12.71, - 'wind_speed': 5.91, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T11:00:00Z', - 'dew_point': 21.9, - 'humidity': 82, - 'precipitation': 0.3, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.04, - 'temperature': 25.3, - 'uv_index': 0, - 'wind_bearing': 212, - 'wind_gust_speed': 9.16, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T12:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.3, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1010.24, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 192, - 'wind_gust_speed': 7.09, - 'wind_speed': 3.62, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T13:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1010.15, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 185, - 'wind_gust_speed': 7.2, - 'wind_speed': 3.27, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 44.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T14:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1009.87, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.22, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 49.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T15:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.2, - 'precipitation_probability': 31.0, - 'pressure': 1009.56, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 180, - 'wind_gust_speed': 9.21, - 'wind_speed': 3.3, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 94, - 'precipitation': 0.2, - 'precipitation_probability': 33.0, - 'pressure': 1009.29, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 9.0, - 'wind_speed': 3.46, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T17:00:00Z', - 'dew_point': 21.7, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 35.0, - 'pressure': 1009.09, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 186, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T18:00:00Z', - 'dew_point': 21.6, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 37.0, - 'pressure': 1009.01, - 'temperature': 22.5, - 'uv_index': 0, - 
'wind_bearing': 201, - 'wind_gust_speed': 7.99, - 'wind_speed': 4.07, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.07, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 258, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.55, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T20:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.23, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 8.77, - 'wind_speed': 5.17, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 38.0, - 'pressure': 1009.47, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 318, - 'wind_gust_speed': 9.69, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 30.0, - 'pressure': 1009.77, - 'temperature': 24.2, - 'uv_index': 1, - 'wind_bearing': 324, - 'wind_gust_speed': 10.88, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 83, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.09, - 'temperature': 25.1, - 'uv_index': 2, - 'wind_bearing': 329, - 'wind_gust_speed': 12.21, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T00:00:00Z', - 'dew_point': 21.9, - 'humidity': 80, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.33, - 'temperature': 25.7, - 'uv_index': 3, - 'wind_bearing': 332, - 'wind_gust_speed': 13.52, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T01:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1007.43, - 'temperature': 27.2, - 'uv_index': 5, - 'wind_bearing': 330, - 'wind_gust_speed': 11.36, - 'wind_speed': 11.36, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T02:00:00Z', - 'dew_point': 21.6, - 'humidity': 70, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1007.05, - 'temperature': 27.5, - 'uv_index': 6, - 'wind_bearing': 332, - 'wind_gust_speed': 12.06, - 'wind_speed': 12.06, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T03:00:00Z', - 'dew_point': 21.6, - 'humidity': 69, - 'precipitation': 0.5, - 'precipitation_probability': 10.0, - 'pressure': 1006.67, - 'temperature': 27.8, - 'uv_index': 6, - 'wind_bearing': 333, - 'wind_gust_speed': 12.81, - 'wind_speed': 12.81, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-18T04:00:00Z', - 'dew_point': 21.5, - 'humidity': 68, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1006.28, - 'temperature': 28.0, - 'uv_index': 5, - 'wind_bearing': 335, - 'wind_gust_speed': 13.68, - 'wind_speed': 13.68, - }), - dict({ - 'apparent_temperature': 30.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T05:00:00Z', - 'dew_point': 21.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1005.89, - 'temperature': 28.1, - 'uv_index': 4, - 'wind_bearing': 336, - 'wind_gust_speed': 14.61, - 'wind_speed': 14.61, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T06:00:00Z', - 'dew_point': 21.2, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 27.0, - 'pressure': 1005.67, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 338, - 'wind_gust_speed': 15.25, - 'wind_speed': 15.25, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T07:00:00Z', - 'dew_point': 21.3, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1005.74, - 'temperature': 27.4, - 'uv_index': 1, - 'wind_bearing': 339, - 'wind_gust_speed': 15.45, - 'wind_speed': 15.45, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T08:00:00Z', - 'dew_point': 21.4, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1005.98, - 'temperature': 26.7, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.38, - 'wind_speed': 15.38, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T09:00:00Z', - 'dew_point': 21.6, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.22, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.27, - 'wind_speed': 15.27, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T10:00:00Z', - 'dew_point': 21.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.44, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 15.09, - 'wind_speed': 15.09, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T11:00:00Z', - 'dew_point': 21.7, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.66, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 336, - 'wind_gust_speed': 14.88, - 'wind_speed': 14.88, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.79, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 333, - 'wind_gust_speed': 14.91, - 'wind_speed': 14.91, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 
1012.36, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 83, - 'wind_gust_speed': 4.58, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T14:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.96, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 4.74, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 24.5, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T15:00:00Z', - 'dew_point': 20.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.6, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 152, - 'wind_gust_speed': 5.63, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T16:00:00Z', - 'dew_point': 20.7, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 22.3, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 6.02, - 'wind_speed': 6.02, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T17:00:00Z', - 'dew_point': 20.4, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.2, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 6.15, - 'wind_speed': 6.15, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T18:00:00Z', - 'dew_point': 20.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.08, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 167, - 'wind_gust_speed': 6.48, - 'wind_speed': 6.48, - }), - dict({ - 'apparent_temperature': 23.2, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T19:00:00Z', - 'dew_point': 19.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.04, - 'temperature': 21.8, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 7.51, - 'wind_speed': 7.51, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 99.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T20:00:00Z', - 'dew_point': 19.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.05, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 8.73, - 'wind_speed': 8.73, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 98.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T21:00:00Z', - 'dew_point': 19.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.06, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 9.21, - 'wind_speed': 9.11, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 96.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T22:00:00Z', - 'dew_point': 19.7, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 171, - 'wind_gust_speed': 9.03, - 'wind_speed': 7.91, - }), - ]), - }) -# --- -# name: test_hourly_forecast[forecast] - dict({ - 'weather.home': dict({ - 
'forecast': list([ - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T14:00:00Z', - 'dew_point': 21.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 264, - 'wind_gust_speed': 13.44, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 80.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 261, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.64, - }), - dict({ - 'apparent_temperature': 23.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.12, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 252, - 'wind_gust_speed': 11.15, - 'wind_speed': 6.14, - }), - dict({ - 'apparent_temperature': 23.5, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.7, - 'uv_index': 0, - 'wind_bearing': 248, - 'wind_gust_speed': 11.57, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T18:00:00Z', - 'dew_point': 20.8, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.05, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 12.42, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 23.0, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.3, - 'uv_index': 0, - 'wind_bearing': 224, - 'wind_gust_speed': 11.3, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T20:00:00Z', - 'dew_point': 20.4, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.31, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 221, - 'wind_gust_speed': 10.57, - 'wind_speed': 5.13, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T21:00:00Z', - 'dew_point': 20.5, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.55, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 10.63, - 'wind_speed': 5.7, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.79, - 'temperature': 22.8, - 'uv_index': 1, - 'wind_bearing': 258, - 'wind_gust_speed': 10.47, - 'wind_speed': 5.22, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T23:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.95, - 'temperature': 24.0, - 'uv_index': 2, - 'wind_bearing': 282, - 'wind_gust_speed': 12.74, - 'wind_speed': 5.71, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T00:00:00Z', - 'dew_point': 21.5, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.35, - 'temperature': 25.1, - 'uv_index': 3, - 'wind_bearing': 294, - 'wind_gust_speed': 13.87, - 'wind_speed': 6.53, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T01:00:00Z', - 'dew_point': 21.8, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 26.5, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 16.04, - 'wind_speed': 6.54, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T02:00:00Z', - 'dew_point': 22.0, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.23, - 'temperature': 27.6, - 'uv_index': 6, - 'wind_bearing': 314, - 'wind_gust_speed': 18.1, - 'wind_speed': 7.32, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T03:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.86, - 'temperature': 28.3, - 'uv_index': 6, - 'wind_bearing': 317, - 'wind_gust_speed': 20.77, - 'wind_speed': 9.1, - }), - dict({ - 'apparent_temperature': 31.5, - 'cloud_coverage': 69.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T04:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.65, - 'temperature': 28.6, - 'uv_index': 6, - 'wind_bearing': 311, - 'wind_gust_speed': 21.27, - 'wind_speed': 10.21, - }), - dict({ - 'apparent_temperature': 31.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T05:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.48, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 317, - 'wind_gust_speed': 19.62, - 'wind_speed': 10.53, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.54, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 335, - 'wind_gust_speed': 18.98, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.76, - 'temperature': 27.1, - 'uv_index': 2, - 'wind_bearing': 338, - 'wind_gust_speed': 17.04, - 'wind_speed': 7.75, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.05, - 'temperature': 26.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 14.75, - 'wind_speed': 6.26, - }), - dict({ - 
'apparent_temperature': 27.9, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 344, - 'wind_gust_speed': 10.43, - 'wind_speed': 5.2, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.73, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 6.95, - 'wind_speed': 3.59, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 326, - 'wind_gust_speed': 5.27, - 'wind_speed': 2.1, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.52, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 5.48, - 'wind_speed': 0.93, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T13:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 188, - 'wind_gust_speed': 4.44, - 'wind_speed': 1.79, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 4.49, - 'wind_speed': 2.19, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.21, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 179, - 'wind_gust_speed': 5.32, - 'wind_speed': 2.65, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 173, - 'wind_gust_speed': 5.81, - 'wind_speed': 3.2, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.88, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 5.53, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 94, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.94, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 6.09, - 'wind_speed': 3.36, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T19:00:00Z', - 'dew_point': 20.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.96, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 6.83, - 'wind_speed': 3.71, - }), - dict({ - 'apparent_temperature': 22.5, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T20:00:00Z', - 'dew_point': 20.0, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 21.0, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 7.98, - 'wind_speed': 4.27, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T21:00:00Z', - 'dew_point': 20.2, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.61, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 8.4, - 'wind_speed': 4.69, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.87, - 'temperature': 23.1, - 'uv_index': 1, - 'wind_bearing': 150, - 'wind_gust_speed': 7.66, - 'wind_speed': 4.33, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 123, - 'wind_gust_speed': 9.63, - 'wind_speed': 3.91, - }), - dict({ - 'apparent_temperature': 30.4, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 105, - 'wind_gust_speed': 12.59, - 'wind_speed': 3.96, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T01:00:00Z', - 'dew_point': 22.9, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.79, - 'temperature': 28.9, - 'uv_index': 5, - 'wind_bearing': 99, - 'wind_gust_speed': 14.17, - 'wind_speed': 4.06, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T02:00:00Z', - 'dew_point': 22.9, - 'humidity': 66, - 'precipitation': 0.3, - 'precipitation_probability': 7.000000000000001, - 'pressure': 1011.29, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 93, - 'wind_gust_speed': 17.75, - 'wind_speed': 4.87, - }), - dict({ - 'apparent_temperature': 34.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T03:00:00Z', - 'dew_point': 23.1, - 'humidity': 64, - 'precipitation': 0.3, - 'precipitation_probability': 11.0, - 'pressure': 1010.78, - 'temperature': 30.6, - 'uv_index': 6, - 'wind_bearing': 78, - 'wind_gust_speed': 17.43, - 
'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T04:00:00Z', - 'dew_point': 23.2, - 'humidity': 66, - 'precipitation': 0.4, - 'precipitation_probability': 15.0, - 'pressure': 1010.37, - 'temperature': 30.3, - 'uv_index': 5, - 'wind_bearing': 60, - 'wind_gust_speed': 15.24, - 'wind_speed': 4.9, - }), - dict({ - 'apparent_temperature': 33.7, - 'cloud_coverage': 79.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T05:00:00Z', - 'dew_point': 23.3, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 17.0, - 'pressure': 1010.09, - 'temperature': 30.0, - 'uv_index': 4, - 'wind_bearing': 80, - 'wind_gust_speed': 13.53, - 'wind_speed': 5.98, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T06:00:00Z', - 'dew_point': 23.4, - 'humidity': 70, - 'precipitation': 1.0, - 'precipitation_probability': 17.0, - 'pressure': 1010.0, - 'temperature': 29.5, - 'uv_index': 3, - 'wind_bearing': 83, - 'wind_gust_speed': 12.55, - 'wind_speed': 6.84, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 88.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 73, - 'precipitation': 0.4, - 'precipitation_probability': 16.0, - 'pressure': 1010.27, - 'temperature': 28.7, - 'uv_index': 2, - 'wind_bearing': 90, - 'wind_gust_speed': 10.16, - 'wind_speed': 6.07, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T08:00:00Z', - 'dew_point': 23.2, - 'humidity': 77, - 'precipitation': 0.5, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.71, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 101, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.82, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 93.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T09:00:00Z', - 'dew_point': 23.2, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.9, - 'temperature': 26.5, - 'uv_index': 0, - 'wind_bearing': 128, - 'wind_gust_speed': 8.89, - 'wind_speed': 4.95, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T10:00:00Z', - 'dew_point': 23.0, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.12, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 134, - 'wind_gust_speed': 10.03, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.43, - 'temperature': 25.1, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 12.4, - 'wind_speed': 5.41, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T12:00:00Z', - 'dew_point': 22.5, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.58, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 16.36, - 'wind_speed': 6.31, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T13:00:00Z', - 'dew_point': 22.4, - 'humidity': 88, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 19.66, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.4, - 'temperature': 24.3, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 21.15, - 'wind_speed': 7.46, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'dew_point': 22.0, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.26, - 'wind_speed': 7.84, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.01, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 23.53, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T17:00:00Z', - 'dew_point': 21.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.78, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 22.83, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T18:00:00Z', - 'dew_point': 21.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.69, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.7, - 'wind_speed': 8.7, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T19:00:00Z', - 'dew_point': 21.4, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.77, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 24.24, - 'wind_speed': 8.74, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.89, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 23.99, - 'wind_speed': 8.81, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T21:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.1, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 25.55, - 'wind_speed': 9.05, - }), - dict({ - 'apparent_temperature': 27.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 24.6, - 'uv_index': 1, - 'wind_bearing': 140, - 'wind_gust_speed': 29.08, - 'wind_speed': 10.37, - }), - dict({ - 
'apparent_temperature': 28.4, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.36, - 'temperature': 25.9, - 'uv_index': 2, - 'wind_bearing': 140, - 'wind_gust_speed': 34.13, - 'wind_speed': 12.56, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T00:00:00Z', - 'dew_point': 22.3, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 27.2, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 38.2, - 'wind_speed': 15.65, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T01:00:00Z', - 'dew_point': 22.3, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 37.55, - 'wind_speed': 15.78, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 143, - 'wind_gust_speed': 35.86, - 'wind_speed': 15.41, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T03:00:00Z', - 'dew_point': 22.5, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.61, - 'temperature': 30.3, - 'uv_index': 6, - 'wind_bearing': 141, - 'wind_gust_speed': 35.88, - 'wind_speed': 15.51, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T04:00:00Z', - 'dew_point': 22.6, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.36, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 140, - 'wind_gust_speed': 35.99, - 'wind_speed': 15.75, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T05:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.11, - 'temperature': 30.1, - 'uv_index': 4, - 'wind_bearing': 137, - 'wind_gust_speed': 33.61, - 'wind_speed': 15.36, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T06:00:00Z', - 'dew_point': 22.5, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.98, - 'temperature': 30.0, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 32.61, - 'wind_speed': 14.98, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.13, - 'temperature': 29.2, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 28.1, - 'wind_speed': 13.88, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 28.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 24.22, - 'wind_speed': 13.02, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T09:00:00Z', - 'dew_point': 21.9, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.81, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 22.5, - 'wind_speed': 11.94, - }), - dict({ - 'apparent_temperature': 28.8, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T10:00:00Z', - 'dew_point': 21.7, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 21.47, - 'wind_speed': 11.25, - }), - dict({ - 'apparent_temperature': 28.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.71, - 'wind_speed': 12.39, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.67, - 'wind_speed': 12.83, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T13:00:00Z', - 'dew_point': 21.7, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 23.34, - 'wind_speed': 12.62, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.83, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.9, - 'wind_speed': 12.07, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T15:00:00Z', - 'dew_point': 21.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.74, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.01, - 'wind_speed': 11.19, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T16:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.56, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 21.29, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T17:00:00Z', - 'dew_point': 21.5, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.35, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 20.52, - 'wind_speed': 
10.5, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 20.04, - 'wind_speed': 10.51, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T19:00:00Z', - 'dew_point': 21.3, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 12.0, - 'pressure': 1011.37, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 18.07, - 'wind_speed': 10.13, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T20:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.2, - 'precipitation_probability': 13.0, - 'pressure': 1011.53, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 16.86, - 'wind_speed': 10.34, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T21:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.71, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 16.66, - 'wind_speed': 10.68, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T22:00:00Z', - 'dew_point': 21.9, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 24.4, - 'uv_index': 1, - 'wind_bearing': 137, - 'wind_gust_speed': 17.21, - 'wind_speed': 10.61, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.05, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 19.23, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 29.5, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.07, - 'temperature': 26.6, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 20.61, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 82.0, - 'condition': 'rainy', - 'datetime': '2023-09-12T01:00:00Z', - 'dew_point': 23.1, - 'humidity': 75, - 'precipitation': 0.2, - 'precipitation_probability': 16.0, - 'pressure': 1011.89, - 'temperature': 27.9, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 23.35, - 'wind_speed': 11.98, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 29.0, - 'uv_index': 5, - 'wind_bearing': 143, - 'wind_gust_speed': 26.45, - 'wind_speed': 13.01, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 69, - 'precipitation': 0.0, 
- 'precipitation_probability': 0.0, - 'pressure': 1011.15, - 'temperature': 29.8, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 28.95, - 'wind_speed': 13.9, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.79, - 'temperature': 30.2, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 27.9, - 'wind_speed': 13.95, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T05:00:00Z', - 'dew_point': 23.1, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.43, - 'temperature': 30.4, - 'uv_index': 4, - 'wind_bearing': 140, - 'wind_gust_speed': 26.53, - 'wind_speed': 13.78, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T06:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.21, - 'temperature': 30.1, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 24.56, - 'wind_speed': 13.74, - }), - dict({ - 'apparent_temperature': 32.0, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.26, - 'temperature': 29.1, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 22.78, - 'wind_speed': 13.21, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.51, - 'temperature': 28.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 19.92, - 'wind_speed': 12.0, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T09:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.8, - 'temperature': 27.2, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 17.65, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T10:00:00Z', - 'dew_point': 21.4, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 15.87, - 'wind_speed': 10.23, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T11:00:00Z', - 'dew_point': 21.3, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1011.79, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 13.9, - 'wind_speed': 9.39, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T12:00:00Z', - 'dew_point': 21.2, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 47.0, - 'pressure': 1012.12, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 
13.32, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1012.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.18, - 'wind_speed': 8.59, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T14:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.09, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.84, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T15:00:00Z', - 'dew_point': 21.3, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.99, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.93, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T16:00:00Z', - 'dew_point': 21.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 16.74, - 'wind_speed': 9.49, - }), - dict({ - 'apparent_temperature': 24.7, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T17:00:00Z', - 'dew_point': 20.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.75, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 17.45, - 'wind_speed': 9.12, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.04, - 'wind_speed': 8.68, - }), - dict({ - 'apparent_temperature': 24.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 16.8, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T20:00:00Z', - 'dew_point': 20.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.23, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.35, - 'wind_speed': 8.36, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T21:00:00Z', - 'dew_point': 20.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.49, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 14.09, - 'wind_speed': 7.77, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T22:00:00Z', - 'dew_point': 21.0, - 'humidity': 84, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.72, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 152, - 'wind_gust_speed': 14.04, - 'wind_speed': 7.25, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T23:00:00Z', - 'dew_point': 21.4, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 25.5, - 'uv_index': 2, - 'wind_bearing': 149, - 'wind_gust_speed': 15.31, - 'wind_speed': 7.14, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-13T00:00:00Z', - 'dew_point': 21.8, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 27.1, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 16.42, - 'wind_speed': 6.89, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T01:00:00Z', - 'dew_point': 22.0, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.65, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 137, - 'wind_gust_speed': 18.64, - 'wind_speed': 6.65, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T02:00:00Z', - 'dew_point': 21.9, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.26, - 'temperature': 29.4, - 'uv_index': 5, - 'wind_bearing': 128, - 'wind_gust_speed': 21.69, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 33.0, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T03:00:00Z', - 'dew_point': 21.9, - 'humidity': 62, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.88, - 'temperature': 30.1, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 23.41, - 'wind_speed': 7.33, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T04:00:00Z', - 'dew_point': 22.0, - 'humidity': 61, - 'precipitation': 0.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.55, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 56, - 'wind_gust_speed': 23.1, - 'wind_speed': 8.09, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 61, - 'precipitation': 1.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.29, - 'temperature': 30.2, - 'uv_index': 4, - 'wind_bearing': 20, - 'wind_gust_speed': 21.81, - 'wind_speed': 9.46, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T06:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 2.3, - 'precipitation_probability': 11.0, - 'pressure': 1011.17, - 'temperature': 29.7, - 'uv_index': 3, - 'wind_bearing': 20, - 'wind_gust_speed': 19.72, - 'wind_speed': 9.8, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 69.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T07:00:00Z', - 'dew_point': 22.4, - 'humidity': 68, - 'precipitation': 1.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.32, - 'temperature': 28.8, - 'uv_index': 1, - 'wind_bearing': 18, - 'wind_gust_speed': 17.55, - 'wind_speed': 9.23, - }), - dict({ - 
'apparent_temperature': 30.8, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T08:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.6, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 27, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.05, - }), - dict({ - 'apparent_temperature': 29.4, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T09:00:00Z', - 'dew_point': 23.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 32, - 'wind_gust_speed': 12.17, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T10:00:00Z', - 'dew_point': 22.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.3, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 69, - 'wind_gust_speed': 11.64, - 'wind_speed': 6.69, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.71, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.23, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.96, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.47, - 'wind_speed': 5.73, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T13:00:00Z', - 'dew_point': 22.3, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.03, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 13.57, - 'wind_speed': 5.66, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.99, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 15.07, - 'wind_speed': 5.83, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T15:00:00Z', - 'dew_point': 22.2, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.95, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 16.06, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T16:00:00Z', - 'dew_point': 22.0, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.9, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 16.05, - 'wind_speed': 5.75, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T17:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 
'pressure': 1012.85, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.52, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T18:00:00Z', - 'dew_point': 21.8, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.87, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.01, - 'wind_speed': 5.32, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 22.8, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.39, - 'wind_speed': 5.33, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.22, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.79, - 'wind_speed': 5.43, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.12, - 'wind_speed': 5.52, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T22:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.59, - 'temperature': 24.3, - 'uv_index': 1, - 'wind_bearing': 147, - 'wind_gust_speed': 16.14, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T23:00:00Z', - 'dew_point': 22.4, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.74, - 'temperature': 25.7, - 'uv_index': 2, - 'wind_bearing': 146, - 'wind_gust_speed': 19.09, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.78, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 143, - 'wind_gust_speed': 21.6, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T01:00:00Z', - 'dew_point': 23.2, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.61, - 'temperature': 28.7, - 'uv_index': 5, - 'wind_bearing': 138, - 'wind_gust_speed': 23.36, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T02:00:00Z', - 'dew_point': 23.2, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.32, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 24.72, - 'wind_speed': 4.99, - }), - dict({ - 'apparent_temperature': 34.4, - 
'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T03:00:00Z', - 'dew_point': 23.3, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.04, - 'temperature': 30.7, - 'uv_index': 6, - 'wind_bearing': 354, - 'wind_gust_speed': 25.23, - 'wind_speed': 4.74, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.77, - 'temperature': 31.0, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 24.6, - 'wind_speed': 4.79, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 60.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T05:00:00Z', - 'dew_point': 23.2, - 'humidity': 64, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1012.53, - 'temperature': 30.7, - 'uv_index': 5, - 'wind_bearing': 336, - 'wind_gust_speed': 23.28, - 'wind_speed': 5.07, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 59.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T06:00:00Z', - 'dew_point': 23.1, - 'humidity': 66, - 'precipitation': 0.2, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1012.49, - 'temperature': 30.2, - 'uv_index': 3, - 'wind_bearing': 336, - 'wind_gust_speed': 22.05, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 32.9, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T07:00:00Z', - 'dew_point': 23.0, - 'humidity': 68, - 'precipitation': 0.2, - 'precipitation_probability': 40.0, - 'pressure': 1012.73, - 'temperature': 29.5, - 'uv_index': 2, - 'wind_bearing': 339, - 'wind_gust_speed': 21.18, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T08:00:00Z', - 'dew_point': 22.8, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 45.0, - 'pressure': 1013.16, - 'temperature': 28.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 20.35, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T09:00:00Z', - 'dew_point': 22.5, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1013.62, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 347, - 'wind_gust_speed': 19.42, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T10:00:00Z', - 'dew_point': 22.4, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.09, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 348, - 'wind_gust_speed': 18.19, - 'wind_speed': 5.31, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T11:00:00Z', - 'dew_point': 22.4, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.56, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 177, - 'wind_gust_speed': 16.79, - 'wind_speed': 4.28, - }), - dict({ - 'apparent_temperature': 27.5, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 86, - 'precipitation': 0.0, - 
'precipitation_probability': 40.0, - 'pressure': 1014.87, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 15.61, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T13:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.91, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 14.7, - 'wind_speed': 4.11, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T14:00:00Z', - 'dew_point': 21.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.8, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 13.81, - 'wind_speed': 4.97, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T15:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.66, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 170, - 'wind_gust_speed': 12.88, - 'wind_speed': 5.57, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T16:00:00Z', - 'dew_point': 21.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.54, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 12.0, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T17:00:00Z', - 'dew_point': 21.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.45, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 11.43, - 'wind_speed': 5.48, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 44.0, - 'pressure': 1014.45, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 11.42, - 'wind_speed': 5.38, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T19:00:00Z', - 'dew_point': 21.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'pressure': 1014.63, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.15, - 'wind_speed': 5.39, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T20:00:00Z', - 'dew_point': 21.8, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 51.0, - 'pressure': 1014.91, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 13.54, - 'wind_speed': 5.45, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T21:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 42.0, - 'pressure': 1015.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 15.48, - 'wind_speed': 5.62, - }), - dict({ - 
'apparent_temperature': 28.5, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T22:00:00Z', - 'dew_point': 22.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 28.999999999999996, - 'pressure': 1015.4, - 'temperature': 25.7, - 'uv_index': 1, - 'wind_bearing': 158, - 'wind_gust_speed': 17.86, - 'wind_speed': 5.84, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 77, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.54, - 'temperature': 27.2, - 'uv_index': 2, - 'wind_bearing': 155, - 'wind_gust_speed': 20.19, - 'wind_speed': 6.09, - }), - dict({ - 'apparent_temperature': 32.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T00:00:00Z', - 'dew_point': 23.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.55, - 'temperature': 28.6, - 'uv_index': 4, - 'wind_bearing': 152, - 'wind_gust_speed': 21.83, - 'wind_speed': 6.42, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T01:00:00Z', - 'dew_point': 23.5, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.35, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 144, - 'wind_gust_speed': 22.56, - 'wind_speed': 6.91, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.0, - 'temperature': 30.4, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.83, - 'wind_speed': 7.47, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.62, - 'temperature': 30.9, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.98, - 'wind_speed': 7.95, - }), - dict({ - 'apparent_temperature': 35.4, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T04:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 31.3, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 23.21, - 'wind_speed': 8.44, - }), - dict({ - 'apparent_temperature': 35.6, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T05:00:00Z', - 'dew_point': 23.7, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.95, - 'temperature': 31.5, - 'uv_index': 5, - 'wind_bearing': 344, - 'wind_gust_speed': 23.46, - 'wind_speed': 8.95, - }), - dict({ - 'apparent_temperature': 35.1, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T06:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.83, - 'temperature': 31.1, - 'uv_index': 3, - 'wind_bearing': 347, - 'wind_gust_speed': 23.64, - 'wind_speed': 9.13, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 66, - 
'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.96, - 'temperature': 30.3, - 'uv_index': 2, - 'wind_bearing': 350, - 'wind_gust_speed': 23.66, - 'wind_speed': 8.78, - }), - dict({ - 'apparent_temperature': 32.4, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T08:00:00Z', - 'dew_point': 23.1, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 29.0, - 'uv_index': 0, - 'wind_bearing': 356, - 'wind_gust_speed': 23.51, - 'wind_speed': 8.13, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T09:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.61, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 3, - 'wind_gust_speed': 23.21, - 'wind_speed': 7.48, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T10:00:00Z', - 'dew_point': 22.8, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.02, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 20, - 'wind_gust_speed': 22.68, - 'wind_speed': 6.83, - }), - dict({ - 'apparent_temperature': 29.2, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.43, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 129, - 'wind_gust_speed': 22.04, - 'wind_speed': 6.1, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T12:00:00Z', - 'dew_point': 22.7, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.71, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.64, - 'wind_speed': 5.6, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T13:00:00Z', - 'dew_point': 23.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.52, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 16.35, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T14:00:00Z', - 'dew_point': 22.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.37, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 17.11, - 'wind_speed': 5.79, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.21, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 17.32, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 16.6, - 'wind_speed': 
5.27, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T17:00:00Z', - 'dew_point': 22.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.95, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 219, - 'wind_gust_speed': 15.52, - 'wind_speed': 4.62, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T18:00:00Z', - 'dew_point': 22.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.88, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 216, - 'wind_gust_speed': 14.64, - 'wind_speed': 4.32, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T19:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.91, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 198, - 'wind_gust_speed': 14.06, - 'wind_speed': 4.73, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T20:00:00Z', - 'dew_point': 22.4, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.99, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 189, - 'wind_gust_speed': 13.7, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T21:00:00Z', - 'dew_point': 22.5, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 13.77, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.12, - 'temperature': 25.5, - 'uv_index': 1, - 'wind_bearing': 179, - 'wind_gust_speed': 14.38, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 52.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.13, - 'temperature': 26.9, - 'uv_index': 2, - 'wind_bearing': 170, - 'wind_gust_speed': 15.2, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.04, - 'temperature': 28.0, - 'uv_index': 4, - 'wind_bearing': 155, - 'wind_gust_speed': 15.85, - 'wind_speed': 4.76, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 24.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T01:00:00Z', - 'dew_point': 22.6, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.52, - 'temperature': 29.2, - 'uv_index': 6, - 'wind_bearing': 110, - 'wind_gust_speed': 16.27, - 'wind_speed': 6.81, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 16.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 
0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.01, - 'temperature': 30.2, - 'uv_index': 8, - 'wind_bearing': 30, - 'wind_gust_speed': 16.55, - 'wind_speed': 6.86, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T03:00:00Z', - 'dew_point': 22.0, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.45, - 'temperature': 31.1, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.52, - 'wind_speed': 6.8, - }), - dict({ - 'apparent_temperature': 34.7, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T04:00:00Z', - 'dew_point': 21.9, - 'humidity': 57, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 31.5, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.08, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.39, - 'temperature': 31.8, - 'uv_index': 6, - 'wind_bearing': 20, - 'wind_gust_speed': 15.48, - 'wind_speed': 6.45, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T06:00:00Z', - 'dew_point': 21.7, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.11, - 'temperature': 31.4, - 'uv_index': 4, - 'wind_bearing': 26, - 'wind_gust_speed': 15.08, - 'wind_speed': 6.43, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 7.000000000000001, - 'condition': 'sunny', - 'datetime': '2023-09-16T07:00:00Z', - 'dew_point': 21.7, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.15, - 'temperature': 30.7, - 'uv_index': 2, - 'wind_bearing': 39, - 'wind_gust_speed': 14.88, - 'wind_speed': 6.61, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.41, - 'temperature': 29.6, - 'uv_index': 0, - 'wind_bearing': 72, - 'wind_gust_speed': 14.82, - 'wind_speed': 6.95, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T09:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.75, - 'temperature': 28.5, - 'uv_index': 0, - 'wind_bearing': 116, - 'wind_gust_speed': 15.13, - 'wind_speed': 7.45, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 13.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T10:00:00Z', - 'dew_point': 22.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.13, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 16.09, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.47, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.37, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 
29.3, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T12:00:00Z', - 'dew_point': 22.9, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.6, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 18.29, - 'wind_speed': 9.21, - }), - dict({ - 'apparent_temperature': 28.7, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T13:00:00Z', - 'dew_point': 23.0, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 25.7, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 18.49, - 'wind_speed': 8.96, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T14:00:00Z', - 'dew_point': 22.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.47, - 'wind_speed': 8.45, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.79, - 'wind_speed': 8.1, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.1, - 'temperature': 24.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 19.81, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T17:00:00Z', - 'dew_point': 22.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.68, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 20.96, - 'wind_speed': 8.3, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T18:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.41, - 'wind_speed': 8.24, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T19:00:00Z', - 'dew_point': 22.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 20.42, - 'wind_speed': 7.62, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T20:00:00Z', - 'dew_point': 22.6, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 18.61, - 'wind_speed': 6.66, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T21:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 
'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 17.14, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 26.0, - 'uv_index': 1, - 'wind_bearing': 161, - 'wind_gust_speed': 16.78, - 'wind_speed': 5.5, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.51, - 'temperature': 27.5, - 'uv_index': 2, - 'wind_bearing': 165, - 'wind_gust_speed': 17.21, - 'wind_speed': 5.56, - }), - dict({ - 'apparent_temperature': 31.7, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T00:00:00Z', - 'dew_point': 22.8, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 28.5, - 'uv_index': 4, - 'wind_bearing': 174, - 'wind_gust_speed': 17.96, - 'wind_speed': 6.04, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T01:00:00Z', - 'dew_point': 22.7, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.4, - 'uv_index': 6, - 'wind_bearing': 192, - 'wind_gust_speed': 19.15, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 28.999999999999996, - 'condition': 'sunny', - 'datetime': '2023-09-17T02:00:00Z', - 'dew_point': 22.8, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 30.1, - 'uv_index': 7, - 'wind_bearing': 225, - 'wind_gust_speed': 20.89, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T03:00:00Z', - 'dew_point': 22.8, - 'humidity': 63, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1009.75, - 'temperature': 30.7, - 'uv_index': 8, - 'wind_bearing': 264, - 'wind_gust_speed': 22.67, - 'wind_speed': 10.27, - }), - dict({ - 'apparent_temperature': 33.9, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T04:00:00Z', - 'dew_point': 22.5, - 'humidity': 62, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1009.18, - 'temperature': 30.5, - 'uv_index': 7, - 'wind_bearing': 293, - 'wind_gust_speed': 23.93, - 'wind_speed': 10.82, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T05:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.6, - 'precipitation_probability': 12.0, - 'pressure': 1008.71, - 'temperature': 30.1, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 24.39, - 'wind_speed': 10.72, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 64, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.46, - 'temperature': 29.6, - 'uv_index': 3, - 'wind_bearing': 312, - 'wind_gust_speed': 23.9, - 'wind_speed': 
10.28, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 47.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.53, - 'temperature': 28.9, - 'uv_index': 1, - 'wind_bearing': 312, - 'wind_gust_speed': 22.3, - 'wind_speed': 9.59, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 70, - 'precipitation': 0.6, - 'precipitation_probability': 15.0, - 'pressure': 1008.82, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 19.73, - 'wind_speed': 8.58, - }), - dict({ - 'apparent_temperature': 29.6, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 74, - 'precipitation': 0.5, - 'precipitation_probability': 15.0, - 'pressure': 1009.21, - 'temperature': 27.0, - 'uv_index': 0, - 'wind_bearing': 291, - 'wind_gust_speed': 16.49, - 'wind_speed': 7.34, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 78, - 'precipitation': 0.4, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1009.65, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 12.71, - 'wind_speed': 5.91, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T11:00:00Z', - 'dew_point': 21.9, - 'humidity': 82, - 'precipitation': 0.3, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.04, - 'temperature': 25.3, - 'uv_index': 0, - 'wind_bearing': 212, - 'wind_gust_speed': 9.16, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T12:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.3, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1010.24, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 192, - 'wind_gust_speed': 7.09, - 'wind_speed': 3.62, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T13:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1010.15, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 185, - 'wind_gust_speed': 7.2, - 'wind_speed': 3.27, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 44.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T14:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1009.87, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.22, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 49.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T15:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.2, - 'precipitation_probability': 31.0, - 'pressure': 1009.56, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 180, - 'wind_gust_speed': 9.21, - 'wind_speed': 3.3, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T16:00:00Z', - 
'dew_point': 21.8, - 'humidity': 94, - 'precipitation': 0.2, - 'precipitation_probability': 33.0, - 'pressure': 1009.29, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 9.0, - 'wind_speed': 3.46, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T17:00:00Z', - 'dew_point': 21.7, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 35.0, - 'pressure': 1009.09, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 186, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T18:00:00Z', - 'dew_point': 21.6, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 37.0, - 'pressure': 1009.01, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 7.99, - 'wind_speed': 4.07, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.07, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 258, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.55, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T20:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.23, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 8.77, - 'wind_speed': 5.17, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 38.0, - 'pressure': 1009.47, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 318, - 'wind_gust_speed': 9.69, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 30.0, - 'pressure': 1009.77, - 'temperature': 24.2, - 'uv_index': 1, - 'wind_bearing': 324, - 'wind_gust_speed': 10.88, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 83, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.09, - 'temperature': 25.1, - 'uv_index': 2, - 'wind_bearing': 329, - 'wind_gust_speed': 12.21, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T00:00:00Z', - 'dew_point': 21.9, - 'humidity': 80, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.33, - 'temperature': 25.7, - 'uv_index': 3, - 'wind_bearing': 332, - 'wind_gust_speed': 13.52, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T01:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1007.43, - 'temperature': 27.2, - 'uv_index': 5, - 'wind_bearing': 330, 
- 'wind_gust_speed': 11.36, - 'wind_speed': 11.36, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T02:00:00Z', - 'dew_point': 21.6, - 'humidity': 70, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1007.05, - 'temperature': 27.5, - 'uv_index': 6, - 'wind_bearing': 332, - 'wind_gust_speed': 12.06, - 'wind_speed': 12.06, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T03:00:00Z', - 'dew_point': 21.6, - 'humidity': 69, - 'precipitation': 0.5, - 'precipitation_probability': 10.0, - 'pressure': 1006.67, - 'temperature': 27.8, - 'uv_index': 6, - 'wind_bearing': 333, - 'wind_gust_speed': 12.81, - 'wind_speed': 12.81, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T04:00:00Z', - 'dew_point': 21.5, - 'humidity': 68, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1006.28, - 'temperature': 28.0, - 'uv_index': 5, - 'wind_bearing': 335, - 'wind_gust_speed': 13.68, - 'wind_speed': 13.68, - }), - dict({ - 'apparent_temperature': 30.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T05:00:00Z', - 'dew_point': 21.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1005.89, - 'temperature': 28.1, - 'uv_index': 4, - 'wind_bearing': 336, - 'wind_gust_speed': 14.61, - 'wind_speed': 14.61, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T06:00:00Z', - 'dew_point': 21.2, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 27.0, - 'pressure': 1005.67, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 338, - 'wind_gust_speed': 15.25, - 'wind_speed': 15.25, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T07:00:00Z', - 'dew_point': 21.3, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1005.74, - 'temperature': 27.4, - 'uv_index': 1, - 'wind_bearing': 339, - 'wind_gust_speed': 15.45, - 'wind_speed': 15.45, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T08:00:00Z', - 'dew_point': 21.4, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1005.98, - 'temperature': 26.7, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.38, - 'wind_speed': 15.38, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T09:00:00Z', - 'dew_point': 21.6, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.22, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.27, - 'wind_speed': 15.27, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T10:00:00Z', - 'dew_point': 21.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.44, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 15.09, - 'wind_speed': 15.09, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 
'datetime': '2023-09-18T11:00:00Z', - 'dew_point': 21.7, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.66, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 336, - 'wind_gust_speed': 14.88, - 'wind_speed': 14.88, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.79, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 333, - 'wind_gust_speed': 14.91, - 'wind_speed': 14.91, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.36, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 83, - 'wind_gust_speed': 4.58, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T14:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.96, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 4.74, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 24.5, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T15:00:00Z', - 'dew_point': 20.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.6, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 152, - 'wind_gust_speed': 5.63, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T16:00:00Z', - 'dew_point': 20.7, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 22.3, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 6.02, - 'wind_speed': 6.02, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T17:00:00Z', - 'dew_point': 20.4, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.2, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 6.15, - 'wind_speed': 6.15, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T18:00:00Z', - 'dew_point': 20.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.08, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 167, - 'wind_gust_speed': 6.48, - 'wind_speed': 6.48, - }), - dict({ - 'apparent_temperature': 23.2, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T19:00:00Z', - 'dew_point': 19.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.04, - 'temperature': 21.8, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 7.51, - 'wind_speed': 7.51, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 99.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T20:00:00Z', - 'dew_point': 19.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.05, - 'temperature': 22.0, - 'uv_index': 0, - 
'wind_bearing': 162, - 'wind_gust_speed': 8.73, - 'wind_speed': 8.73, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 98.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T21:00:00Z', - 'dew_point': 19.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.06, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 9.21, - 'wind_speed': 9.11, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 96.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T22:00:00Z', - 'dew_point': 19.7, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 171, - 'wind_gust_speed': 9.03, - 'wind_speed': 7.91, - }), - ]), - }), - }) -# --- -# name: test_hourly_forecast[get_forecast] - dict({ - 'forecast': list([ - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T14:00:00Z', - 'dew_point': 21.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 264, - 'wind_gust_speed': 13.44, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 80.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.24, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 261, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.64, - }), - dict({ - 'apparent_temperature': 23.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.12, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 252, - 'wind_gust_speed': 11.15, - 'wind_speed': 6.14, - }), - dict({ - 'apparent_temperature': 23.5, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.7, - 'uv_index': 0, - 'wind_bearing': 248, - 'wind_gust_speed': 11.57, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T18:00:00Z', - 'dew_point': 20.8, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.05, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 12.42, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 23.0, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.03, - 'temperature': 21.3, - 'uv_index': 0, - 'wind_bearing': 224, - 'wind_gust_speed': 11.3, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T20:00:00Z', - 'dew_point': 20.4, - 'humidity': 96, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.31, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 221, - 'wind_gust_speed': 10.57, - 'wind_speed': 5.13, - }), - dict({ - 'apparent_temperature': 23.1, - 
'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T21:00:00Z', - 'dew_point': 20.5, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.55, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 237, - 'wind_gust_speed': 10.63, - 'wind_speed': 5.7, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-08T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.79, - 'temperature': 22.8, - 'uv_index': 1, - 'wind_bearing': 258, - 'wind_gust_speed': 10.47, - 'wind_speed': 5.22, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-08T23:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.95, - 'temperature': 24.0, - 'uv_index': 2, - 'wind_bearing': 282, - 'wind_gust_speed': 12.74, - 'wind_speed': 5.71, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T00:00:00Z', - 'dew_point': 21.5, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.35, - 'temperature': 25.1, - 'uv_index': 3, - 'wind_bearing': 294, - 'wind_gust_speed': 13.87, - 'wind_speed': 6.53, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T01:00:00Z', - 'dew_point': 21.8, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 26.5, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 16.04, - 'wind_speed': 6.54, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T02:00:00Z', - 'dew_point': 22.0, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.23, - 'temperature': 27.6, - 'uv_index': 6, - 'wind_bearing': 314, - 'wind_gust_speed': 18.1, - 'wind_speed': 7.32, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T03:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.86, - 'temperature': 28.3, - 'uv_index': 6, - 'wind_bearing': 317, - 'wind_gust_speed': 20.77, - 'wind_speed': 9.1, - }), - dict({ - 'apparent_temperature': 31.5, - 'cloud_coverage': 69.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T04:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.65, - 'temperature': 28.6, - 'uv_index': 6, - 'wind_bearing': 311, - 'wind_gust_speed': 21.27, - 'wind_speed': 10.21, - }), - dict({ - 'apparent_temperature': 31.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T05:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.48, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 317, - 'wind_gust_speed': 19.62, - 'wind_speed': 10.53, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 
'pressure': 1009.54, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 335, - 'wind_gust_speed': 18.98, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.76, - 'temperature': 27.1, - 'uv_index': 2, - 'wind_bearing': 338, - 'wind_gust_speed': 17.04, - 'wind_speed': 7.75, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.05, - 'temperature': 26.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 14.75, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 344, - 'wind_gust_speed': 10.43, - 'wind_speed': 5.2, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.73, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 6.95, - 'wind_speed': 3.59, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 326, - 'wind_gust_speed': 5.27, - 'wind_speed': 2.1, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.52, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 5.48, - 'wind_speed': 0.93, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T13:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 188, - 'wind_gust_speed': 4.44, - 'wind_speed': 1.79, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 4.49, - 'wind_speed': 2.19, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T15:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.21, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 179, - 'wind_gust_speed': 5.32, - 'wind_speed': 2.65, - }), - dict({ - 'apparent_temperature': 24.0, - 
'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T16:00:00Z', - 'dew_point': 21.1, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 173, - 'wind_gust_speed': 5.81, - 'wind_speed': 3.2, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T17:00:00Z', - 'dew_point': 20.9, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.88, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 5.53, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 23.3, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.94, - 'temperature': 21.6, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 6.09, - 'wind_speed': 3.36, - }), - dict({ - 'apparent_temperature': 23.1, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T19:00:00Z', - 'dew_point': 20.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.96, - 'temperature': 21.4, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 6.83, - 'wind_speed': 3.71, - }), - dict({ - 'apparent_temperature': 22.5, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T20:00:00Z', - 'dew_point': 20.0, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 21.0, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 7.98, - 'wind_speed': 4.27, - }), - dict({ - 'apparent_temperature': 22.8, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T21:00:00Z', - 'dew_point': 20.2, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.61, - 'temperature': 21.2, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 8.4, - 'wind_speed': 4.69, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-09T22:00:00Z', - 'dew_point': 21.3, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.87, - 'temperature': 23.1, - 'uv_index': 1, - 'wind_bearing': 150, - 'wind_gust_speed': 7.66, - 'wind_speed': 4.33, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-09T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 123, - 'wind_gust_speed': 9.63, - 'wind_speed': 3.91, - }), - dict({ - 'apparent_temperature': 30.4, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 27.4, - 'uv_index': 4, - 'wind_bearing': 105, - 'wind_gust_speed': 12.59, - 'wind_speed': 3.96, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T01:00:00Z', - 'dew_point': 22.9, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 
0.0, - 'pressure': 1011.79, - 'temperature': 28.9, - 'uv_index': 5, - 'wind_bearing': 99, - 'wind_gust_speed': 14.17, - 'wind_speed': 4.06, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T02:00:00Z', - 'dew_point': 22.9, - 'humidity': 66, - 'precipitation': 0.3, - 'precipitation_probability': 7.000000000000001, - 'pressure': 1011.29, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 93, - 'wind_gust_speed': 17.75, - 'wind_speed': 4.87, - }), - dict({ - 'apparent_temperature': 34.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T03:00:00Z', - 'dew_point': 23.1, - 'humidity': 64, - 'precipitation': 0.3, - 'precipitation_probability': 11.0, - 'pressure': 1010.78, - 'temperature': 30.6, - 'uv_index': 6, - 'wind_bearing': 78, - 'wind_gust_speed': 17.43, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T04:00:00Z', - 'dew_point': 23.2, - 'humidity': 66, - 'precipitation': 0.4, - 'precipitation_probability': 15.0, - 'pressure': 1010.37, - 'temperature': 30.3, - 'uv_index': 5, - 'wind_bearing': 60, - 'wind_gust_speed': 15.24, - 'wind_speed': 4.9, - }), - dict({ - 'apparent_temperature': 33.7, - 'cloud_coverage': 79.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T05:00:00Z', - 'dew_point': 23.3, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 17.0, - 'pressure': 1010.09, - 'temperature': 30.0, - 'uv_index': 4, - 'wind_bearing': 80, - 'wind_gust_speed': 13.53, - 'wind_speed': 5.98, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T06:00:00Z', - 'dew_point': 23.4, - 'humidity': 70, - 'precipitation': 1.0, - 'precipitation_probability': 17.0, - 'pressure': 1010.0, - 'temperature': 29.5, - 'uv_index': 3, - 'wind_bearing': 83, - 'wind_gust_speed': 12.55, - 'wind_speed': 6.84, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 88.0, - 'condition': 'rainy', - 'datetime': '2023-09-10T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 73, - 'precipitation': 0.4, - 'precipitation_probability': 16.0, - 'pressure': 1010.27, - 'temperature': 28.7, - 'uv_index': 2, - 'wind_bearing': 90, - 'wind_gust_speed': 10.16, - 'wind_speed': 6.07, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T08:00:00Z', - 'dew_point': 23.2, - 'humidity': 77, - 'precipitation': 0.5, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.71, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 101, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.82, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 93.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T09:00:00Z', - 'dew_point': 23.2, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.9, - 'temperature': 26.5, - 'uv_index': 0, - 'wind_bearing': 128, - 'wind_gust_speed': 8.89, - 'wind_speed': 4.95, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T10:00:00Z', - 'dew_point': 23.0, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.12, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 134, - 'wind_gust_speed': 10.03, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 27.9, - 
'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.43, - 'temperature': 25.1, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 12.4, - 'wind_speed': 5.41, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T12:00:00Z', - 'dew_point': 22.5, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.58, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 16.36, - 'wind_speed': 6.31, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T13:00:00Z', - 'dew_point': 22.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 19.66, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.4, - 'temperature': 24.3, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 21.15, - 'wind_speed': 7.46, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T15:00:00Z', - 'dew_point': 22.0, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.23, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.26, - 'wind_speed': 7.84, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.01, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 23.53, - 'wind_speed': 8.63, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-10T17:00:00Z', - 'dew_point': 21.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.78, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 22.83, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T18:00:00Z', - 'dew_point': 21.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.69, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.7, - 'wind_speed': 8.7, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T19:00:00Z', - 'dew_point': 21.4, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.77, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 24.24, - 'wind_speed': 8.74, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.89, - 
'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 23.99, - 'wind_speed': 8.81, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T21:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.1, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 25.55, - 'wind_speed': 9.05, - }), - dict({ - 'apparent_temperature': 27.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 24.6, - 'uv_index': 1, - 'wind_bearing': 140, - 'wind_gust_speed': 29.08, - 'wind_speed': 10.37, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-10T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.36, - 'temperature': 25.9, - 'uv_index': 2, - 'wind_bearing': 140, - 'wind_gust_speed': 34.13, - 'wind_speed': 12.56, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T00:00:00Z', - 'dew_point': 22.3, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 27.2, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 38.2, - 'wind_speed': 15.65, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T01:00:00Z', - 'dew_point': 22.3, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 37.55, - 'wind_speed': 15.78, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 143, - 'wind_gust_speed': 35.86, - 'wind_speed': 15.41, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T03:00:00Z', - 'dew_point': 22.5, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.61, - 'temperature': 30.3, - 'uv_index': 6, - 'wind_bearing': 141, - 'wind_gust_speed': 35.88, - 'wind_speed': 15.51, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T04:00:00Z', - 'dew_point': 22.6, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.36, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 140, - 'wind_gust_speed': 35.99, - 'wind_speed': 15.75, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T05:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.11, - 'temperature': 30.1, - 'uv_index': 4, - 'wind_bearing': 137, - 'wind_gust_speed': 33.61, - 'wind_speed': 15.36, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 77.0, - 
'condition': 'cloudy', - 'datetime': '2023-09-11T06:00:00Z', - 'dew_point': 22.5, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1009.98, - 'temperature': 30.0, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 32.61, - 'wind_speed': 14.98, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T07:00:00Z', - 'dew_point': 22.2, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.13, - 'temperature': 29.2, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 28.1, - 'wind_speed': 13.88, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T08:00:00Z', - 'dew_point': 22.1, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.48, - 'temperature': 28.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 24.22, - 'wind_speed': 13.02, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-11T09:00:00Z', - 'dew_point': 21.9, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.81, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 22.5, - 'wind_speed': 11.94, - }), - dict({ - 'apparent_temperature': 28.8, - 'cloud_coverage': 63.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T10:00:00Z', - 'dew_point': 21.7, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 137, - 'wind_gust_speed': 21.47, - 'wind_speed': 11.25, - }), - dict({ - 'apparent_temperature': 28.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T11:00:00Z', - 'dew_point': 21.8, - 'humidity': 80, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 22.71, - 'wind_speed': 12.39, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 23.67, - 'wind_speed': 12.83, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T13:00:00Z', - 'dew_point': 21.7, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.97, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 23.34, - 'wind_speed': 12.62, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T14:00:00Z', - 'dew_point': 21.7, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.83, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.9, - 'wind_speed': 12.07, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T15:00:00Z', - 'dew_point': 21.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 
'pressure': 1011.74, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 22.01, - 'wind_speed': 11.19, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T16:00:00Z', - 'dew_point': 21.6, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.56, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 21.29, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T17:00:00Z', - 'dew_point': 21.5, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.35, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 20.52, - 'wind_speed': 10.5, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.3, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 20.04, - 'wind_speed': 10.51, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T19:00:00Z', - 'dew_point': 21.3, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 12.0, - 'pressure': 1011.37, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 18.07, - 'wind_speed': 10.13, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T20:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.2, - 'precipitation_probability': 13.0, - 'pressure': 1011.53, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 16.86, - 'wind_speed': 10.34, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T21:00:00Z', - 'dew_point': 21.4, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.71, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 138, - 'wind_gust_speed': 16.66, - 'wind_speed': 10.68, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T22:00:00Z', - 'dew_point': 21.9, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 24.4, - 'uv_index': 1, - 'wind_bearing': 137, - 'wind_gust_speed': 17.21, - 'wind_speed': 10.61, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 78.0, - 'condition': 'cloudy', - 'datetime': '2023-09-11T23:00:00Z', - 'dew_point': 22.3, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.05, - 'temperature': 25.6, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 19.23, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 29.5, - 'cloud_coverage': 79.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T00:00:00Z', - 'dew_point': 22.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.07, - 'temperature': 26.6, - 'uv_index': 3, - 'wind_bearing': 140, - 'wind_gust_speed': 20.61, - 'wind_speed': 11.13, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 
82.0, - 'condition': 'rainy', - 'datetime': '2023-09-12T01:00:00Z', - 'dew_point': 23.1, - 'humidity': 75, - 'precipitation': 0.2, - 'precipitation_probability': 16.0, - 'pressure': 1011.89, - 'temperature': 27.9, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 23.35, - 'wind_speed': 11.98, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 85.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.53, - 'temperature': 29.0, - 'uv_index': 5, - 'wind_bearing': 143, - 'wind_gust_speed': 26.45, - 'wind_speed': 13.01, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.15, - 'temperature': 29.8, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 28.95, - 'wind_speed': 13.9, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.79, - 'temperature': 30.2, - 'uv_index': 5, - 'wind_bearing': 141, - 'wind_gust_speed': 27.9, - 'wind_speed': 13.95, - }), - dict({ - 'apparent_temperature': 34.0, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T05:00:00Z', - 'dew_point': 23.1, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.43, - 'temperature': 30.4, - 'uv_index': 4, - 'wind_bearing': 140, - 'wind_gust_speed': 26.53, - 'wind_speed': 13.78, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T06:00:00Z', - 'dew_point': 22.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.21, - 'temperature': 30.1, - 'uv_index': 3, - 'wind_bearing': 138, - 'wind_gust_speed': 24.56, - 'wind_speed': 13.74, - }), - dict({ - 'apparent_temperature': 32.0, - 'cloud_coverage': 53.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.26, - 'temperature': 29.1, - 'uv_index': 2, - 'wind_bearing': 138, - 'wind_gust_speed': 22.78, - 'wind_speed': 13.21, - }), - dict({ - 'apparent_temperature': 30.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.51, - 'temperature': 28.1, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 19.92, - 'wind_speed': 12.0, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T09:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.8, - 'temperature': 27.2, - 'uv_index': 0, - 'wind_bearing': 141, - 'wind_gust_speed': 17.65, - 'wind_speed': 10.97, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T10:00:00Z', - 'dew_point': 21.4, - 'humidity': 75, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, 
- 'pressure': 1011.23, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 143, - 'wind_gust_speed': 15.87, - 'wind_speed': 10.23, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T11:00:00Z', - 'dew_point': 21.3, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1011.79, - 'temperature': 25.4, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 13.9, - 'wind_speed': 9.39, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-12T12:00:00Z', - 'dew_point': 21.2, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 47.0, - 'pressure': 1012.12, - 'temperature': 24.7, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.32, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1012.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.18, - 'wind_speed': 8.59, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T14:00:00Z', - 'dew_point': 21.3, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.09, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 13.84, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T15:00:00Z', - 'dew_point': 21.3, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.99, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.93, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T16:00:00Z', - 'dew_point': 21.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 16.74, - 'wind_speed': 9.49, - }), - dict({ - 'apparent_temperature': 24.7, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T17:00:00Z', - 'dew_point': 20.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.75, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 146, - 'wind_gust_speed': 17.45, - 'wind_speed': 9.12, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T18:00:00Z', - 'dew_point': 20.7, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.77, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.04, - 'wind_speed': 8.68, - }), - dict({ - 'apparent_temperature': 24.1, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T19:00:00Z', - 'dew_point': 20.6, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.93, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 16.8, - 'wind_speed': 8.61, - }), - dict({ - 'apparent_temperature': 23.9, - 
'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T20:00:00Z', - 'dew_point': 20.5, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.23, - 'temperature': 22.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.35, - 'wind_speed': 8.36, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 75.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T21:00:00Z', - 'dew_point': 20.6, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.49, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 14.09, - 'wind_speed': 7.77, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T22:00:00Z', - 'dew_point': 21.0, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.72, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 152, - 'wind_gust_speed': 14.04, - 'wind_speed': 7.25, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-12T23:00:00Z', - 'dew_point': 21.4, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 25.5, - 'uv_index': 2, - 'wind_bearing': 149, - 'wind_gust_speed': 15.31, - 'wind_speed': 7.14, - }), - dict({ - 'apparent_temperature': 29.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-13T00:00:00Z', - 'dew_point': 21.8, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 27.1, - 'uv_index': 4, - 'wind_bearing': 141, - 'wind_gust_speed': 16.42, - 'wind_speed': 6.89, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T01:00:00Z', - 'dew_point': 22.0, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.65, - 'temperature': 28.4, - 'uv_index': 5, - 'wind_bearing': 137, - 'wind_gust_speed': 18.64, - 'wind_speed': 6.65, - }), - dict({ - 'apparent_temperature': 32.3, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T02:00:00Z', - 'dew_point': 21.9, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.26, - 'temperature': 29.4, - 'uv_index': 5, - 'wind_bearing': 128, - 'wind_gust_speed': 21.69, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 33.0, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T03:00:00Z', - 'dew_point': 21.9, - 'humidity': 62, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.88, - 'temperature': 30.1, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 23.41, - 'wind_speed': 7.33, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T04:00:00Z', - 'dew_point': 22.0, - 'humidity': 61, - 'precipitation': 0.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.55, - 'temperature': 30.4, - 'uv_index': 5, - 'wind_bearing': 56, - 'wind_gust_speed': 23.1, - 'wind_speed': 8.09, - }), - dict({ - 'apparent_temperature': 33.2, - 'cloud_coverage': 72.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 61, - 'precipitation': 1.9, - 'precipitation_probability': 12.0, - 'pressure': 1011.29, - 
'temperature': 30.2, - 'uv_index': 4, - 'wind_bearing': 20, - 'wind_gust_speed': 21.81, - 'wind_speed': 9.46, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 74.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T06:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 2.3, - 'precipitation_probability': 11.0, - 'pressure': 1011.17, - 'temperature': 29.7, - 'uv_index': 3, - 'wind_bearing': 20, - 'wind_gust_speed': 19.72, - 'wind_speed': 9.8, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 69.0, - 'condition': 'rainy', - 'datetime': '2023-09-13T07:00:00Z', - 'dew_point': 22.4, - 'humidity': 68, - 'precipitation': 1.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.32, - 'temperature': 28.8, - 'uv_index': 1, - 'wind_bearing': 18, - 'wind_gust_speed': 17.55, - 'wind_speed': 9.23, - }), - dict({ - 'apparent_temperature': 30.8, - 'cloud_coverage': 73.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T08:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.8, - 'precipitation_probability': 10.0, - 'pressure': 1011.6, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 27, - 'wind_gust_speed': 15.08, - 'wind_speed': 8.05, - }), - dict({ - 'apparent_temperature': 29.4, - 'cloud_coverage': 76.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T09:00:00Z', - 'dew_point': 23.0, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.94, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 32, - 'wind_gust_speed': 12.17, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T10:00:00Z', - 'dew_point': 22.9, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.3, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 69, - 'wind_gust_speed': 11.64, - 'wind_speed': 6.69, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.71, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 11.91, - 'wind_speed': 6.23, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.96, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.47, - 'wind_speed': 5.73, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 82.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T13:00:00Z', - 'dew_point': 22.3, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.03, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 13.57, - 'wind_speed': 5.66, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 84.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T14:00:00Z', - 'dew_point': 22.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.99, - 'temperature': 23.9, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 15.07, - 'wind_speed': 5.83, - }), - dict({ - 'apparent_temperature': 26.1, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': 
'2023-09-13T15:00:00Z', - 'dew_point': 22.2, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.95, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 16.06, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 88.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T16:00:00Z', - 'dew_point': 22.0, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.9, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 153, - 'wind_gust_speed': 16.05, - 'wind_speed': 5.75, - }), - dict({ - 'apparent_temperature': 25.4, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T17:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.85, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 150, - 'wind_gust_speed': 15.52, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 92.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T18:00:00Z', - 'dew_point': 21.8, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.87, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 15.01, - 'wind_speed': 5.32, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 90.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 22.8, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.39, - 'wind_speed': 5.33, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 89.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T20:00:00Z', - 'dew_point': 21.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.22, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 13.79, - 'wind_speed': 5.43, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 86.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 147, - 'wind_gust_speed': 14.12, - 'wind_speed': 5.52, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 77.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T22:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.59, - 'temperature': 24.3, - 'uv_index': 1, - 'wind_bearing': 147, - 'wind_gust_speed': 16.14, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-13T23:00:00Z', - 'dew_point': 22.4, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.74, - 'temperature': 25.7, - 'uv_index': 2, - 'wind_bearing': 146, - 'wind_gust_speed': 19.09, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.78, - 'temperature': 27.4, - 'uv_index': 4, - 
'wind_bearing': 143, - 'wind_gust_speed': 21.6, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 32.2, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T01:00:00Z', - 'dew_point': 23.2, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.61, - 'temperature': 28.7, - 'uv_index': 5, - 'wind_bearing': 138, - 'wind_gust_speed': 23.36, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 54.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T02:00:00Z', - 'dew_point': 23.2, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.32, - 'temperature': 29.9, - 'uv_index': 6, - 'wind_bearing': 111, - 'wind_gust_speed': 24.72, - 'wind_speed': 4.99, - }), - dict({ - 'apparent_temperature': 34.4, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T03:00:00Z', - 'dew_point': 23.3, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.04, - 'temperature': 30.7, - 'uv_index': 6, - 'wind_bearing': 354, - 'wind_gust_speed': 25.23, - 'wind_speed': 4.74, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T04:00:00Z', - 'dew_point': 23.4, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.77, - 'temperature': 31.0, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 24.6, - 'wind_speed': 4.79, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 60.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T05:00:00Z', - 'dew_point': 23.2, - 'humidity': 64, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1012.53, - 'temperature': 30.7, - 'uv_index': 5, - 'wind_bearing': 336, - 'wind_gust_speed': 23.28, - 'wind_speed': 5.07, - }), - dict({ - 'apparent_temperature': 33.8, - 'cloud_coverage': 59.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T06:00:00Z', - 'dew_point': 23.1, - 'humidity': 66, - 'precipitation': 0.2, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1012.49, - 'temperature': 30.2, - 'uv_index': 3, - 'wind_bearing': 336, - 'wind_gust_speed': 22.05, - 'wind_speed': 5.34, - }), - dict({ - 'apparent_temperature': 32.9, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-14T07:00:00Z', - 'dew_point': 23.0, - 'humidity': 68, - 'precipitation': 0.2, - 'precipitation_probability': 40.0, - 'pressure': 1012.73, - 'temperature': 29.5, - 'uv_index': 2, - 'wind_bearing': 339, - 'wind_gust_speed': 21.18, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 31.6, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T08:00:00Z', - 'dew_point': 22.8, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 45.0, - 'pressure': 1013.16, - 'temperature': 28.4, - 'uv_index': 0, - 'wind_bearing': 342, - 'wind_gust_speed': 20.35, - 'wind_speed': 5.93, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T09:00:00Z', - 'dew_point': 22.5, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1013.62, - 'temperature': 27.1, - 'uv_index': 0, - 'wind_bearing': 347, - 'wind_gust_speed': 19.42, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 29.0, - 'cloud_coverage': 32.0, - 
'condition': 'sunny', - 'datetime': '2023-09-14T10:00:00Z', - 'dew_point': 22.4, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.09, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 348, - 'wind_gust_speed': 18.19, - 'wind_speed': 5.31, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T11:00:00Z', - 'dew_point': 22.4, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.56, - 'temperature': 25.5, - 'uv_index': 0, - 'wind_bearing': 177, - 'wind_gust_speed': 16.79, - 'wind_speed': 4.28, - }), - dict({ - 'apparent_temperature': 27.5, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T12:00:00Z', - 'dew_point': 22.3, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.87, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 15.61, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T13:00:00Z', - 'dew_point': 22.1, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.91, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 14.7, - 'wind_speed': 4.11, - }), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T14:00:00Z', - 'dew_point': 21.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.8, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 171, - 'wind_gust_speed': 13.81, - 'wind_speed': 4.97, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T15:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.66, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 170, - 'wind_gust_speed': 12.88, - 'wind_speed': 5.57, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T16:00:00Z', - 'dew_point': 21.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.54, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 12.0, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 24.4, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T17:00:00Z', - 'dew_point': 21.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 40.0, - 'pressure': 1014.45, - 'temperature': 22.4, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 11.43, - 'wind_speed': 5.48, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T18:00:00Z', - 'dew_point': 21.4, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 44.0, - 'pressure': 1014.45, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 11.42, - 'wind_speed': 5.38, - }), - dict({ - 'apparent_temperature': 25.0, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T19:00:00Z', - 'dew_point': 21.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 52.0, - 'pressure': 1014.63, - 
'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 12.15, - 'wind_speed': 5.39, - }), - dict({ - 'apparent_temperature': 25.6, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-14T20:00:00Z', - 'dew_point': 21.8, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 51.0, - 'pressure': 1014.91, - 'temperature': 23.4, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 13.54, - 'wind_speed': 5.45, - }), - dict({ - 'apparent_temperature': 26.6, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T21:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 42.0, - 'pressure': 1015.18, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 15.48, - 'wind_speed': 5.62, - }), - dict({ - 'apparent_temperature': 28.5, - 'cloud_coverage': 32.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T22:00:00Z', - 'dew_point': 22.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 28.999999999999996, - 'pressure': 1015.4, - 'temperature': 25.7, - 'uv_index': 1, - 'wind_bearing': 158, - 'wind_gust_speed': 17.86, - 'wind_speed': 5.84, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-14T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 77, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.54, - 'temperature': 27.2, - 'uv_index': 2, - 'wind_bearing': 155, - 'wind_gust_speed': 20.19, - 'wind_speed': 6.09, - }), - dict({ - 'apparent_temperature': 32.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T00:00:00Z', - 'dew_point': 23.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.55, - 'temperature': 28.6, - 'uv_index': 4, - 'wind_bearing': 152, - 'wind_gust_speed': 21.83, - 'wind_speed': 6.42, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-15T01:00:00Z', - 'dew_point': 23.5, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.35, - 'temperature': 29.6, - 'uv_index': 6, - 'wind_bearing': 144, - 'wind_gust_speed': 22.56, - 'wind_speed': 6.91, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T02:00:00Z', - 'dew_point': 23.5, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.0, - 'temperature': 30.4, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.83, - 'wind_speed': 7.47, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T03:00:00Z', - 'dew_point': 23.5, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.62, - 'temperature': 30.9, - 'uv_index': 7, - 'wind_bearing': 336, - 'wind_gust_speed': 22.98, - 'wind_speed': 7.95, - }), - dict({ - 'apparent_temperature': 35.4, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T04:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 31.3, - 'uv_index': 6, - 'wind_bearing': 341, - 'wind_gust_speed': 23.21, - 'wind_speed': 8.44, - }), - dict({ - 'apparent_temperature': 35.6, - 'cloud_coverage': 
44.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T05:00:00Z', - 'dew_point': 23.7, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.95, - 'temperature': 31.5, - 'uv_index': 5, - 'wind_bearing': 344, - 'wind_gust_speed': 23.46, - 'wind_speed': 8.95, - }), - dict({ - 'apparent_temperature': 35.1, - 'cloud_coverage': 42.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T06:00:00Z', - 'dew_point': 23.6, - 'humidity': 64, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.83, - 'temperature': 31.1, - 'uv_index': 3, - 'wind_bearing': 347, - 'wind_gust_speed': 23.64, - 'wind_speed': 9.13, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T07:00:00Z', - 'dew_point': 23.4, - 'humidity': 66, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.96, - 'temperature': 30.3, - 'uv_index': 2, - 'wind_bearing': 350, - 'wind_gust_speed': 23.66, - 'wind_speed': 8.78, - }), - dict({ - 'apparent_temperature': 32.4, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T08:00:00Z', - 'dew_point': 23.1, - 'humidity': 70, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.25, - 'temperature': 29.0, - 'uv_index': 0, - 'wind_bearing': 356, - 'wind_gust_speed': 23.51, - 'wind_speed': 8.13, - }), - dict({ - 'apparent_temperature': 31.1, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T09:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.61, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 3, - 'wind_gust_speed': 23.21, - 'wind_speed': 7.48, - }), - dict({ - 'apparent_temperature': 30.0, - 'cloud_coverage': 43.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T10:00:00Z', - 'dew_point': 22.8, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.02, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 20, - 'wind_gust_speed': 22.68, - 'wind_speed': 6.83, - }), - dict({ - 'apparent_temperature': 29.2, - 'cloud_coverage': 46.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T11:00:00Z', - 'dew_point': 22.8, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.43, - 'temperature': 26.2, - 'uv_index': 0, - 'wind_bearing': 129, - 'wind_gust_speed': 22.04, - 'wind_speed': 6.1, - }), - dict({ - 'apparent_temperature': 28.4, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T12:00:00Z', - 'dew_point': 22.7, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.71, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.64, - 'wind_speed': 5.6, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T13:00:00Z', - 'dew_point': 23.2, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.52, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 16.35, - 'wind_speed': 5.58, - }), - dict({ - 'apparent_temperature': 27.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T14:00:00Z', - 'dew_point': 22.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 
'pressure': 1015.37, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 168, - 'wind_gust_speed': 17.11, - 'wind_speed': 5.79, - }), - dict({ - 'apparent_temperature': 26.9, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.21, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 17.32, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.4, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 16.6, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T17:00:00Z', - 'dew_point': 22.5, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.95, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 219, - 'wind_gust_speed': 15.52, - 'wind_speed': 4.62, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T18:00:00Z', - 'dew_point': 22.3, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.88, - 'temperature': 23.3, - 'uv_index': 0, - 'wind_bearing': 216, - 'wind_gust_speed': 14.64, - 'wind_speed': 4.32, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T19:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.91, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 198, - 'wind_gust_speed': 14.06, - 'wind_speed': 4.73, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T20:00:00Z', - 'dew_point': 22.4, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.99, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 189, - 'wind_gust_speed': 13.7, - 'wind_speed': 5.49, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-15T21:00:00Z', - 'dew_point': 22.5, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.07, - 'temperature': 24.4, - 'uv_index': 0, - 'wind_bearing': 183, - 'wind_gust_speed': 13.77, - 'wind_speed': 5.95, - }), - dict({ - 'apparent_temperature': 28.3, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 84, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.12, - 'temperature': 25.5, - 'uv_index': 1, - 'wind_bearing': 179, - 'wind_gust_speed': 14.38, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 29.9, - 'cloud_coverage': 52.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-15T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.13, - 'temperature': 26.9, - 'uv_index': 2, - 'wind_bearing': 170, - 'wind_gust_speed': 15.2, - 'wind_speed': 5.27, - }), - dict({ - 'apparent_temperature': 31.2, - 'cloud_coverage': 44.0, 
- 'condition': 'partlycloudy', - 'datetime': '2023-09-16T00:00:00Z', - 'dew_point': 22.9, - 'humidity': 74, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1015.04, - 'temperature': 28.0, - 'uv_index': 4, - 'wind_bearing': 155, - 'wind_gust_speed': 15.85, - 'wind_speed': 4.76, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 24.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T01:00:00Z', - 'dew_point': 22.6, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.52, - 'temperature': 29.2, - 'uv_index': 6, - 'wind_bearing': 110, - 'wind_gust_speed': 16.27, - 'wind_speed': 6.81, - }), - dict({ - 'apparent_temperature': 33.5, - 'cloud_coverage': 16.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T02:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1014.01, - 'temperature': 30.2, - 'uv_index': 8, - 'wind_bearing': 30, - 'wind_gust_speed': 16.55, - 'wind_speed': 6.86, - }), - dict({ - 'apparent_temperature': 34.2, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T03:00:00Z', - 'dew_point': 22.0, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.45, - 'temperature': 31.1, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.52, - 'wind_speed': 6.8, - }), - dict({ - 'apparent_temperature': 34.7, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T04:00:00Z', - 'dew_point': 21.9, - 'humidity': 57, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.89, - 'temperature': 31.5, - 'uv_index': 8, - 'wind_bearing': 17, - 'wind_gust_speed': 16.08, - 'wind_speed': 6.62, - }), - dict({ - 'apparent_temperature': 34.9, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T05:00:00Z', - 'dew_point': 21.9, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.39, - 'temperature': 31.8, - 'uv_index': 6, - 'wind_bearing': 20, - 'wind_gust_speed': 15.48, - 'wind_speed': 6.45, - }), - dict({ - 'apparent_temperature': 34.5, - 'cloud_coverage': 10.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T06:00:00Z', - 'dew_point': 21.7, - 'humidity': 56, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.11, - 'temperature': 31.4, - 'uv_index': 4, - 'wind_bearing': 26, - 'wind_gust_speed': 15.08, - 'wind_speed': 6.43, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 7.000000000000001, - 'condition': 'sunny', - 'datetime': '2023-09-16T07:00:00Z', - 'dew_point': 21.7, - 'humidity': 59, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.15, - 'temperature': 30.7, - 'uv_index': 2, - 'wind_bearing': 39, - 'wind_gust_speed': 14.88, - 'wind_speed': 6.61, - }), - dict({ - 'apparent_temperature': 32.5, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 63, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.41, - 'temperature': 29.6, - 'uv_index': 0, - 'wind_bearing': 72, - 'wind_gust_speed': 14.82, - 'wind_speed': 6.95, - }), - dict({ - 'apparent_temperature': 31.4, - 'cloud_coverage': 2.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T09:00:00Z', - 'dew_point': 22.1, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.75, - 'temperature': 28.5, - 
'uv_index': 0, - 'wind_bearing': 116, - 'wind_gust_speed': 15.13, - 'wind_speed': 7.45, - }), - dict({ - 'apparent_temperature': 30.5, - 'cloud_coverage': 13.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T10:00:00Z', - 'dew_point': 22.3, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.13, - 'temperature': 27.6, - 'uv_index': 0, - 'wind_bearing': 140, - 'wind_gust_speed': 16.09, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 31.0, - 'condition': 'sunny', - 'datetime': '2023-09-16T11:00:00Z', - 'dew_point': 22.6, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.47, - 'temperature': 26.9, - 'uv_index': 0, - 'wind_bearing': 149, - 'wind_gust_speed': 17.37, - 'wind_speed': 8.87, - }), - dict({ - 'apparent_temperature': 29.3, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T12:00:00Z', - 'dew_point': 22.9, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.6, - 'temperature': 26.3, - 'uv_index': 0, - 'wind_bearing': 155, - 'wind_gust_speed': 18.29, - 'wind_speed': 9.21, - }), - dict({ - 'apparent_temperature': 28.7, - 'cloud_coverage': 51.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T13:00:00Z', - 'dew_point': 23.0, - 'humidity': 85, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.41, - 'temperature': 25.7, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 18.49, - 'wind_speed': 8.96, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 55.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T14:00:00Z', - 'dew_point': 22.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1013.01, - 'temperature': 25.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.47, - 'wind_speed': 8.45, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T15:00:00Z', - 'dew_point': 22.7, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.55, - 'temperature': 24.5, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 18.79, - 'wind_speed': 8.1, - }), - dict({ - 'apparent_temperature': 26.7, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T16:00:00Z', - 'dew_point': 22.6, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.1, - 'temperature': 24.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 19.81, - 'wind_speed': 8.15, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T17:00:00Z', - 'dew_point': 22.6, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.68, - 'temperature': 23.7, - 'uv_index': 0, - 'wind_bearing': 161, - 'wind_gust_speed': 20.96, - 'wind_speed': 8.3, - }), - dict({ - 'apparent_temperature': 26.0, - 'cloud_coverage': 72.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T18:00:00Z', - 'dew_point': 22.4, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 21.41, - 'wind_speed': 8.24, - }), - dict({ - 'apparent_temperature': 26.3, - 'cloud_coverage': 70.0, - 'condition': 
'cloudy', - 'datetime': '2023-09-16T19:00:00Z', - 'dew_point': 22.5, - 'humidity': 93, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.29, - 'temperature': 23.8, - 'uv_index': 0, - 'wind_bearing': 159, - 'wind_gust_speed': 20.42, - 'wind_speed': 7.62, - }), - dict({ - 'apparent_temperature': 26.8, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-16T20:00:00Z', - 'dew_point': 22.6, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.31, - 'temperature': 24.2, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 18.61, - 'wind_speed': 6.66, - }), - dict({ - 'apparent_temperature': 27.7, - 'cloud_coverage': 57.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T21:00:00Z', - 'dew_point': 22.6, - 'humidity': 87, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 24.9, - 'uv_index': 0, - 'wind_bearing': 158, - 'wind_gust_speed': 17.14, - 'wind_speed': 5.86, - }), - dict({ - 'apparent_temperature': 28.9, - 'cloud_coverage': 48.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T22:00:00Z', - 'dew_point': 22.6, - 'humidity': 82, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.46, - 'temperature': 26.0, - 'uv_index': 1, - 'wind_bearing': 161, - 'wind_gust_speed': 16.78, - 'wind_speed': 5.5, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 39.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-16T23:00:00Z', - 'dew_point': 22.9, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.51, - 'temperature': 27.5, - 'uv_index': 2, - 'wind_bearing': 165, - 'wind_gust_speed': 17.21, - 'wind_speed': 5.56, - }), - dict({ - 'apparent_temperature': 31.7, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T00:00:00Z', - 'dew_point': 22.8, - 'humidity': 71, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.39, - 'temperature': 28.5, - 'uv_index': 4, - 'wind_bearing': 174, - 'wind_gust_speed': 17.96, - 'wind_speed': 6.04, - }), - dict({ - 'apparent_temperature': 32.6, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T01:00:00Z', - 'dew_point': 22.7, - 'humidity': 68, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.98, - 'temperature': 29.4, - 'uv_index': 6, - 'wind_bearing': 192, - 'wind_gust_speed': 19.15, - 'wind_speed': 7.23, - }), - dict({ - 'apparent_temperature': 33.6, - 'cloud_coverage': 28.999999999999996, - 'condition': 'sunny', - 'datetime': '2023-09-17T02:00:00Z', - 'dew_point': 22.8, - 'humidity': 65, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1010.38, - 'temperature': 30.1, - 'uv_index': 7, - 'wind_bearing': 225, - 'wind_gust_speed': 20.89, - 'wind_speed': 8.9, - }), - dict({ - 'apparent_temperature': 34.1, - 'cloud_coverage': 30.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T03:00:00Z', - 'dew_point': 22.8, - 'humidity': 63, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1009.75, - 'temperature': 30.7, - 'uv_index': 8, - 'wind_bearing': 264, - 'wind_gust_speed': 22.67, - 'wind_speed': 10.27, - }), - dict({ - 'apparent_temperature': 33.9, - 'cloud_coverage': 37.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T04:00:00Z', - 'dew_point': 22.5, - 'humidity': 62, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1009.18, - 
'temperature': 30.5, - 'uv_index': 7, - 'wind_bearing': 293, - 'wind_gust_speed': 23.93, - 'wind_speed': 10.82, - }), - dict({ - 'apparent_temperature': 33.4, - 'cloud_coverage': 45.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T05:00:00Z', - 'dew_point': 22.4, - 'humidity': 63, - 'precipitation': 0.6, - 'precipitation_probability': 12.0, - 'pressure': 1008.71, - 'temperature': 30.1, - 'uv_index': 5, - 'wind_bearing': 308, - 'wind_gust_speed': 24.39, - 'wind_speed': 10.72, - }), - dict({ - 'apparent_temperature': 32.7, - 'cloud_coverage': 50.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T06:00:00Z', - 'dew_point': 22.2, - 'humidity': 64, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.46, - 'temperature': 29.6, - 'uv_index': 3, - 'wind_bearing': 312, - 'wind_gust_speed': 23.9, - 'wind_speed': 10.28, - }), - dict({ - 'apparent_temperature': 31.8, - 'cloud_coverage': 47.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T07:00:00Z', - 'dew_point': 22.1, - 'humidity': 67, - 'precipitation': 0.7, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1008.53, - 'temperature': 28.9, - 'uv_index': 1, - 'wind_bearing': 312, - 'wind_gust_speed': 22.3, - 'wind_speed': 9.59, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 41.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T08:00:00Z', - 'dew_point': 21.9, - 'humidity': 70, - 'precipitation': 0.6, - 'precipitation_probability': 15.0, - 'pressure': 1008.82, - 'temperature': 27.9, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 19.73, - 'wind_speed': 8.58, - }), - dict({ - 'apparent_temperature': 29.6, - 'cloud_coverage': 35.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T09:00:00Z', - 'dew_point': 22.0, - 'humidity': 74, - 'precipitation': 0.5, - 'precipitation_probability': 15.0, - 'pressure': 1009.21, - 'temperature': 27.0, - 'uv_index': 0, - 'wind_bearing': 291, - 'wind_gust_speed': 16.49, - 'wind_speed': 7.34, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 33.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T10:00:00Z', - 'dew_point': 21.9, - 'humidity': 78, - 'precipitation': 0.4, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1009.65, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 257, - 'wind_gust_speed': 12.71, - 'wind_speed': 5.91, - }), - dict({ - 'apparent_temperature': 27.8, - 'cloud_coverage': 34.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T11:00:00Z', - 'dew_point': 21.9, - 'humidity': 82, - 'precipitation': 0.3, - 'precipitation_probability': 14.000000000000002, - 'pressure': 1010.04, - 'temperature': 25.3, - 'uv_index': 0, - 'wind_bearing': 212, - 'wind_gust_speed': 9.16, - 'wind_speed': 4.54, - }), - dict({ - 'apparent_temperature': 27.1, - 'cloud_coverage': 36.0, - 'condition': 'sunny', - 'datetime': '2023-09-17T12:00:00Z', - 'dew_point': 21.9, - 'humidity': 85, - 'precipitation': 0.3, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1010.24, - 'temperature': 24.6, - 'uv_index': 0, - 'wind_bearing': 192, - 'wind_gust_speed': 7.09, - 'wind_speed': 3.62, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 40.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T13:00:00Z', - 'dew_point': 22.0, - 'humidity': 88, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1010.15, - 'temperature': 24.1, - 'uv_index': 0, - 'wind_bearing': 185, - 'wind_gust_speed': 7.2, - 'wind_speed': 3.27, - 
}), - dict({ - 'apparent_temperature': 25.9, - 'cloud_coverage': 44.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T14:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.3, - 'precipitation_probability': 30.0, - 'pressure': 1009.87, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.22, - }), - dict({ - 'apparent_temperature': 25.5, - 'cloud_coverage': 49.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T15:00:00Z', - 'dew_point': 21.8, - 'humidity': 92, - 'precipitation': 0.2, - 'precipitation_probability': 31.0, - 'pressure': 1009.56, - 'temperature': 23.2, - 'uv_index': 0, - 'wind_bearing': 180, - 'wind_gust_speed': 9.21, - 'wind_speed': 3.3, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 53.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T16:00:00Z', - 'dew_point': 21.8, - 'humidity': 94, - 'precipitation': 0.2, - 'precipitation_probability': 33.0, - 'pressure': 1009.29, - 'temperature': 22.9, - 'uv_index': 0, - 'wind_bearing': 182, - 'wind_gust_speed': 9.0, - 'wind_speed': 3.46, - }), - dict({ - 'apparent_temperature': 24.8, - 'cloud_coverage': 56.00000000000001, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T17:00:00Z', - 'dew_point': 21.7, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 35.0, - 'pressure': 1009.09, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 186, - 'wind_gust_speed': 8.37, - 'wind_speed': 3.72, - }), - dict({ - 'apparent_temperature': 24.6, - 'cloud_coverage': 59.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T18:00:00Z', - 'dew_point': 21.6, - 'humidity': 95, - 'precipitation': 0.0, - 'precipitation_probability': 37.0, - 'pressure': 1009.01, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 201, - 'wind_gust_speed': 7.99, - 'wind_speed': 4.07, - }), - dict({ - 'apparent_temperature': 24.9, - 'cloud_coverage': 62.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-17T19:00:00Z', - 'dew_point': 21.7, - 'humidity': 94, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.07, - 'temperature': 22.7, - 'uv_index': 0, - 'wind_bearing': 258, - 'wind_gust_speed': 8.18, - 'wind_speed': 4.55, - }), - dict({ - 'apparent_temperature': 25.2, - 'cloud_coverage': 64.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T20:00:00Z', - 'dew_point': 21.7, - 'humidity': 92, - 'precipitation': 0.0, - 'precipitation_probability': 39.0, - 'pressure': 1009.23, - 'temperature': 23.0, - 'uv_index': 0, - 'wind_bearing': 305, - 'wind_gust_speed': 8.77, - 'wind_speed': 5.17, - }), - dict({ - 'apparent_temperature': 25.8, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T21:00:00Z', - 'dew_point': 21.8, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 38.0, - 'pressure': 1009.47, - 'temperature': 23.5, - 'uv_index': 0, - 'wind_bearing': 318, - 'wind_gust_speed': 9.69, - 'wind_speed': 5.77, - }), - dict({ - 'apparent_temperature': 26.5, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-17T22:00:00Z', - 'dew_point': 21.8, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 30.0, - 'pressure': 1009.77, - 'temperature': 24.2, - 'uv_index': 1, - 'wind_bearing': 324, - 'wind_gust_speed': 10.88, - 'wind_speed': 6.26, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 80.0, - 'condition': 'rainy', - 'datetime': '2023-09-17T23:00:00Z', - 'dew_point': 21.9, - 'humidity': 83, - 
'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.09, - 'temperature': 25.1, - 'uv_index': 2, - 'wind_bearing': 329, - 'wind_gust_speed': 12.21, - 'wind_speed': 6.68, - }), - dict({ - 'apparent_temperature': 28.2, - 'cloud_coverage': 87.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T00:00:00Z', - 'dew_point': 21.9, - 'humidity': 80, - 'precipitation': 0.2, - 'precipitation_probability': 15.0, - 'pressure': 1010.33, - 'temperature': 25.7, - 'uv_index': 3, - 'wind_bearing': 332, - 'wind_gust_speed': 13.52, - 'wind_speed': 7.12, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T01:00:00Z', - 'dew_point': 21.7, - 'humidity': 72, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1007.43, - 'temperature': 27.2, - 'uv_index': 5, - 'wind_bearing': 330, - 'wind_gust_speed': 11.36, - 'wind_speed': 11.36, - }), - dict({ - 'apparent_temperature': 30.1, - 'cloud_coverage': 70.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T02:00:00Z', - 'dew_point': 21.6, - 'humidity': 70, - 'precipitation': 0.3, - 'precipitation_probability': 9.0, - 'pressure': 1007.05, - 'temperature': 27.5, - 'uv_index': 6, - 'wind_bearing': 332, - 'wind_gust_speed': 12.06, - 'wind_speed': 12.06, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 71.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T03:00:00Z', - 'dew_point': 21.6, - 'humidity': 69, - 'precipitation': 0.5, - 'precipitation_probability': 10.0, - 'pressure': 1006.67, - 'temperature': 27.8, - 'uv_index': 6, - 'wind_bearing': 333, - 'wind_gust_speed': 12.81, - 'wind_speed': 12.81, - }), - dict({ - 'apparent_temperature': 30.6, - 'cloud_coverage': 67.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T04:00:00Z', - 'dew_point': 21.5, - 'humidity': 68, - 'precipitation': 0.4, - 'precipitation_probability': 10.0, - 'pressure': 1006.28, - 'temperature': 28.0, - 'uv_index': 5, - 'wind_bearing': 335, - 'wind_gust_speed': 13.68, - 'wind_speed': 13.68, - }), - dict({ - 'apparent_temperature': 30.7, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T05:00:00Z', - 'dew_point': 21.4, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1005.89, - 'temperature': 28.1, - 'uv_index': 4, - 'wind_bearing': 336, - 'wind_gust_speed': 14.61, - 'wind_speed': 14.61, - }), - dict({ - 'apparent_temperature': 30.3, - 'cloud_coverage': 56.99999999999999, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T06:00:00Z', - 'dew_point': 21.2, - 'humidity': 67, - 'precipitation': 0.0, - 'precipitation_probability': 27.0, - 'pressure': 1005.67, - 'temperature': 27.9, - 'uv_index': 3, - 'wind_bearing': 338, - 'wind_gust_speed': 15.25, - 'wind_speed': 15.25, - }), - dict({ - 'apparent_temperature': 29.8, - 'cloud_coverage': 60.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T07:00:00Z', - 'dew_point': 21.3, - 'humidity': 69, - 'precipitation': 0.0, - 'precipitation_probability': 28.000000000000004, - 'pressure': 1005.74, - 'temperature': 27.4, - 'uv_index': 1, - 'wind_bearing': 339, - 'wind_gust_speed': 15.45, - 'wind_speed': 15.45, - }), - dict({ - 'apparent_temperature': 29.1, - 'cloud_coverage': 65.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T08:00:00Z', - 'dew_point': 21.4, - 'humidity': 73, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1005.98, - 'temperature': 26.7, - 'uv_index': 0, - 'wind_bearing': 341, - 
'wind_gust_speed': 15.38, - 'wind_speed': 15.38, - }), - dict({ - 'apparent_temperature': 28.6, - 'cloud_coverage': 68.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T09:00:00Z', - 'dew_point': 21.6, - 'humidity': 76, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.22, - 'temperature': 26.1, - 'uv_index': 0, - 'wind_bearing': 341, - 'wind_gust_speed': 15.27, - 'wind_speed': 15.27, - }), - dict({ - 'apparent_temperature': 27.9, - 'cloud_coverage': 66.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T10:00:00Z', - 'dew_point': 21.6, - 'humidity': 79, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1006.44, - 'temperature': 25.6, - 'uv_index': 0, - 'wind_bearing': 339, - 'wind_gust_speed': 15.09, - 'wind_speed': 15.09, - }), - dict({ - 'apparent_temperature': 27.6, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T11:00:00Z', - 'dew_point': 21.7, - 'humidity': 81, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.66, - 'temperature': 25.2, - 'uv_index': 0, - 'wind_bearing': 336, - 'wind_gust_speed': 14.88, - 'wind_speed': 14.88, - }), - dict({ - 'apparent_temperature': 27.2, - 'cloud_coverage': 61.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T12:00:00Z', - 'dew_point': 21.8, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 26.0, - 'pressure': 1006.79, - 'temperature': 24.8, - 'uv_index': 0, - 'wind_bearing': 333, - 'wind_gust_speed': 14.91, - 'wind_speed': 14.91, - }), - dict({ - 'apparent_temperature': 25.7, - 'cloud_coverage': 38.0, - 'condition': 'partlycloudy', - 'datetime': '2023-09-18T13:00:00Z', - 'dew_point': 21.2, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1012.36, - 'temperature': 23.6, - 'uv_index': 0, - 'wind_bearing': 83, - 'wind_gust_speed': 4.58, - 'wind_speed': 3.16, - }), - dict({ - 'apparent_temperature': 25.1, - 'cloud_coverage': 74.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T14:00:00Z', - 'dew_point': 21.2, - 'humidity': 89, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.96, - 'temperature': 23.1, - 'uv_index': 0, - 'wind_bearing': 144, - 'wind_gust_speed': 4.74, - 'wind_speed': 4.52, - }), - dict({ - 'apparent_temperature': 24.5, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T15:00:00Z', - 'dew_point': 20.9, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.6, - 'temperature': 22.6, - 'uv_index': 0, - 'wind_bearing': 152, - 'wind_gust_speed': 5.63, - 'wind_speed': 5.63, - }), - dict({ - 'apparent_temperature': 24.0, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T16:00:00Z', - 'dew_point': 20.7, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.37, - 'temperature': 22.3, - 'uv_index': 0, - 'wind_bearing': 156, - 'wind_gust_speed': 6.02, - 'wind_speed': 6.02, - }), - dict({ - 'apparent_temperature': 23.7, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T17:00:00Z', - 'dew_point': 20.4, - 'humidity': 91, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.2, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 6.15, - 'wind_speed': 6.15, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T18:00:00Z', - 
'dew_point': 20.2, - 'humidity': 90, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.08, - 'temperature': 21.9, - 'uv_index': 0, - 'wind_bearing': 167, - 'wind_gust_speed': 6.48, - 'wind_speed': 6.48, - }), - dict({ - 'apparent_temperature': 23.2, - 'cloud_coverage': 100.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T19:00:00Z', - 'dew_point': 19.8, - 'humidity': 88, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.04, - 'temperature': 21.8, - 'uv_index': 0, - 'wind_bearing': 165, - 'wind_gust_speed': 7.51, - 'wind_speed': 7.51, - }), - dict({ - 'apparent_temperature': 23.4, - 'cloud_coverage': 99.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T20:00:00Z', - 'dew_point': 19.6, - 'humidity': 86, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.05, - 'temperature': 22.0, - 'uv_index': 0, - 'wind_bearing': 162, - 'wind_gust_speed': 8.73, - 'wind_speed': 8.73, - }), - dict({ - 'apparent_temperature': 23.9, - 'cloud_coverage': 98.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T21:00:00Z', - 'dew_point': 19.5, - 'humidity': 83, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.06, - 'temperature': 22.5, - 'uv_index': 0, - 'wind_bearing': 164, - 'wind_gust_speed': 9.21, - 'wind_speed': 9.11, - }), - dict({ - 'apparent_temperature': 25.3, - 'cloud_coverage': 96.0, - 'condition': 'cloudy', - 'datetime': '2023-09-18T22:00:00Z', - 'dew_point': 19.7, - 'humidity': 78, - 'precipitation': 0.0, - 'precipitation_probability': 0.0, - 'pressure': 1011.09, - 'temperature': 23.8, - 'uv_index': 1, - 'wind_bearing': 171, - 'wind_gust_speed': 9.03, - 'wind_speed': 7.91, - }), - ]), - }) -# --- # name: test_hourly_forecast[get_forecasts] dict({ 'weather.home': dict({ diff --git a/tests/components/webhook/test_init.py b/tests/components/webhook/test_init.py index 6f4ae1ebefc..af07616024a 100644 --- a/tests/components/webhook/test_init.py +++ b/tests/components/webhook/test_init.py @@ -319,7 +319,9 @@ async def test_ws_webhook( received = [] - async def handler(hass, webhook_id, request): + async def handler( + hass: HomeAssistant, webhook_id: str, request: web.Request + ) -> web.Response: """Handle a webhook.""" received.append(request) return web.json_response({"from": "handler"}) diff --git a/tests/components/webostv/__init__.py b/tests/components/webostv/__init__.py index 5ef210da56d..d6c096f9d3a 100644 --- a/tests/components/webostv/__init__.py +++ b/tests/components/webostv/__init__.py @@ -2,6 +2,7 @@ from homeassistant.components.webostv.const import DOMAIN from homeassistant.const import CONF_CLIENT_SECRET, CONF_HOST +from homeassistant.core import HomeAssistant from homeassistant.setup import async_setup_component from .const import CLIENT_KEY, FAKE_UUID, HOST, TV_NAME @@ -9,7 +10,9 @@ from .const import CLIENT_KEY, FAKE_UUID, HOST, TV_NAME from tests.common import MockConfigEntry -async def setup_webostv(hass, unique_id=FAKE_UUID): +async def setup_webostv( + hass: HomeAssistant, unique_id: str | None = FAKE_UUID +) -> MockConfigEntry: """Initialize webostv and media_player for tests.""" entry = MockConfigEntry( domain=DOMAIN, diff --git a/tests/components/webostv/test_config_flow.py b/tests/components/webostv/test_config_flow.py index 406bb9c8804..9b2983aab47 100644 --- a/tests/components/webostv/test_config_flow.py +++ b/tests/components/webostv/test_config_flow.py @@ -302,11 +302,7 @@ async def test_reauth_successful( entry = await setup_webostv(hass) assert client 
- result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) @@ -339,11 +335,7 @@ async def test_reauth_errors( entry = await setup_webostv(hass) assert client - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" result = await hass.config_entries.flow.async_configure(result["flow_id"]) diff --git a/tests/components/websocket_api/test_auth.py b/tests/components/websocket_api/test_auth.py index 62298098adc..20a728cf3cd 100644 --- a/tests/components/websocket_api/test_auth.py +++ b/tests/components/websocket_api/test_auth.py @@ -26,7 +26,7 @@ from tests.typing import ClientSessionGenerator @pytest.fixture -def track_connected(hass): +def track_connected(hass: HomeAssistant) -> dict[str, list[int]]: """Track connected and disconnected events.""" connected_evt = [] diff --git a/tests/components/websocket_api/test_commands.py b/tests/components/websocket_api/test_commands.py index 10a9c4876b9..54a87e033dc 100644 --- a/tests/components/websocket_api/test_commands.py +++ b/tests/components/websocket_api/test_commands.py @@ -920,7 +920,7 @@ async def test_subscribe_entities_with_unserializable_state( class CannotSerializeMe: """Cannot serialize this.""" - def __init__(self): + def __init__(self) -> None: """Init cannot serialize this.""" hass.states.async_set("light.permitted", "off", {"color": "red"}) @@ -1262,6 +1262,54 @@ async def test_subscribe_unsubscribe_entities_specific_entities( } +async def test_subscribe_unsubscribe_entities_with_filter( + hass: HomeAssistant, + websocket_client: MockHAClientWebSocket, + hass_admin_user: MockUser, +) -> None: + """Test subscribe/unsubscribe entities with an entity filter.""" + hass.states.async_set("switch.not_included", "off") + hass.states.async_set("light.include", "off") + await websocket_client.send_json( + {"id": 7, "type": "subscribe_entities", "include": {"domains": ["light"]}} + ) + + msg = await websocket_client.receive_json() + assert msg["id"] == 7 + assert msg["type"] == const.TYPE_RESULT + assert msg["success"] + + msg = await websocket_client.receive_json() + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "a": { + "light.include": { + "a": {}, + "c": ANY, + "lc": ANY, + "s": "off", + } + } + } + hass.states.async_set("switch.not_included", "on") + hass.states.async_set("light.include", "on") + msg = await websocket_client.receive_json() + assert msg["id"] == 7 + assert msg["type"] == "event" + assert msg["event"] == { + "c": { + "light.include": { + "+": { + "c": ANY, + "lc": ANY, + "s": "on", + } + } + } + } + + async def test_render_template_renders_template( hass: HomeAssistant, websocket_client ) -> None: diff --git a/tests/components/websocket_api/test_decorators.py b/tests/components/websocket_api/test_decorators.py index 0ade5329190..81ac4b96409 100644 --- a/tests/components/websocket_api/test_decorators.py +++ b/tests/components/websocket_api/test_decorators.py @@ -1,5 +1,7 @@ """Test decorators.""" +from typing import Any + import voluptuous as vol from homeassistant.components import http, websocket_api @@ -19,24 +21,40 @@ 
async def test_async_response_request_context( @websocket_api.websocket_command({"type": "test-get-request-executor"}) @websocket_api.async_response - async def executor_get_request(hass, connection, msg): + async def executor_get_request( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: handle_request( await hass.async_add_executor_job(http.current_request.get), connection, msg ) @websocket_api.websocket_command({"type": "test-get-request-async"}) @websocket_api.async_response - async def async_get_request(hass, connection, msg): + async def async_get_request( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: handle_request(http.current_request.get(), connection, msg) @websocket_api.websocket_command({"type": "test-get-request"}) - def get_request(hass, connection, msg): + def get_request( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: handle_request(http.current_request.get(), connection, msg) @websocket_api.websocket_command( {"type": "test-get-request-with-arg", vol.Required("arg"): str} ) - def get_with_arg_request(hass, connection, msg): + def get_with_arg_request( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: handle_request(http.current_request.get(), connection, msg) websocket_api.async_register_command(hass, executor_get_request) @@ -145,7 +163,11 @@ async def test_supervisor_only(hass: HomeAssistant, websocket_client) -> None: @websocket_api.ws_require_user(only_supervisor=True) @websocket_api.websocket_command({"type": "test-require-supervisor-user"}) - def require_supervisor_request(hass, connection, msg): + def require_supervisor_request( + hass: HomeAssistant, + connection: websocket_api.ActiveConnection, + msg: dict[str, Any], + ) -> None: connection.send_result(msg["id"]) websocket_api.async_register_command(hass, require_supervisor_request) diff --git a/tests/components/websocket_api/test_http.py b/tests/components/websocket_api/test_http.py index 11665da11b4..2530d885942 100644 --- a/tests/components/websocket_api/test_http.py +++ b/tests/components/websocket_api/test_http.py @@ -363,12 +363,12 @@ async def test_non_json_message( assert "bad=<object" in caplog.text -async def test_prepare_fail( +async def test_prepare_fail_timeout( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, caplog: pytest.LogCaptureFixture, ) -> None: - """Test failing to prepare.""" + """Test failing to prepare due to timeout.""" with ( patch( "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", @@ -381,6 +381,24 @@ async def test_prepare_fail( assert "Timeout preparing request" in caplog.text +async def test_prepare_fail_connection_reset( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + caplog: pytest.LogCaptureFixture, +) -> None: + """Test failing to prepare due to connection reset.""" + with ( + patch( + "homeassistant.components.websocket_api.http.web.WebSocketResponse.prepare", + side_effect=(ConnectionResetError, web.WebSocketResponse.prepare), + ), + pytest.raises(WSServerHandshakeError), + ): + await hass_ws_client(hass) + + assert "Connection reset by peer while preparing WebSocket" in caplog.text + + async def test_enable_coalesce( hass: HomeAssistant, hass_ws_client: WebSocketGenerator, diff --git a/tests/components/wemo/conftest.py b/tests/components/wemo/conftest.py index 1316c37b62b..64bd89f4793 100644 --- a/tests/components/wemo/conftest.py +++ b/tests/components/wemo/conftest.py @@ -1,13 +1,15 @@ """Fixtures for pywemo.""" +from collections.abc import Generator import contextlib -from 
unittest.mock import create_autospec, patch +from unittest.mock import MagicMock, create_autospec, patch import pytest import pywemo from homeassistant.components.wemo import CONF_DISCOVERY, CONF_STATIC from homeassistant.components.wemo.const import DOMAIN +from homeassistant.core import HomeAssistant from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -22,13 +24,13 @@ MOCK_INSIGHT_STATE_THRESHOLD_POWER = 8.0 @pytest.fixture(name="pywemo_model") -def pywemo_model_fixture(): +def pywemo_model_fixture() -> str: """Fixture containing a pywemo class name used by pywemo_device_fixture.""" return "LightSwitch" @pytest.fixture(name="pywemo_registry", autouse=True) -async def async_pywemo_registry_fixture(): +def async_pywemo_registry_fixture() -> Generator[MagicMock]: """Fixture for SubscriptionRegistry instances.""" registry = create_autospec(pywemo.SubscriptionRegistry, instance=True) @@ -52,7 +54,9 @@ def pywemo_discovery_responder_fixture(): @contextlib.contextmanager -def create_pywemo_device(pywemo_registry, pywemo_model): +def create_pywemo_device( + pywemo_registry: MagicMock, pywemo_model: str +) -> pywemo.WeMoDevice: """Create a WeMoDevice instance.""" cls = getattr(pywemo, pywemo_model) device = create_autospec(cls, instance=True) @@ -90,14 +94,18 @@ def create_pywemo_device(pywemo_registry, pywemo_model): @pytest.fixture(name="pywemo_device") -def pywemo_device_fixture(pywemo_registry, pywemo_model): +def pywemo_device_fixture( + pywemo_registry: MagicMock, pywemo_model: str +) -> Generator[pywemo.WeMoDevice]: """Fixture for WeMoDevice instances.""" with create_pywemo_device(pywemo_registry, pywemo_model) as pywemo_device: yield pywemo_device @pytest.fixture(name="pywemo_dli_device") -def pywemo_dli_device_fixture(pywemo_registry, pywemo_model): +def pywemo_dli_device_fixture( + pywemo_registry: MagicMock, pywemo_model: str +) -> Generator[pywemo.WeMoDevice]: """Fixture for Digital Loggers emulated instances.""" with create_pywemo_device(pywemo_registry, pywemo_model) as pywemo_dli_device: pywemo_dli_device.model_name = "DLI emulated Belkin Socket" @@ -106,12 +114,14 @@ def pywemo_dli_device_fixture(pywemo_registry, pywemo_model): @pytest.fixture(name="wemo_entity_suffix") -def wemo_entity_suffix_fixture(): +def wemo_entity_suffix_fixture() -> str: """Fixture to select a specific entity for wemo_entity.""" return "" -async def async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix): +async def async_create_wemo_entity( + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity_suffix: str +) -> er.RegistryEntry | None: """Create a hass entity for a wemo device.""" assert await async_setup_component( hass, @@ -134,12 +144,16 @@ async def async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix): @pytest.fixture(name="wemo_entity") -async def async_wemo_entity_fixture(hass, pywemo_device, wemo_entity_suffix): +async def async_wemo_entity_fixture( + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity_suffix: str +) -> er.RegistryEntry | None: """Fixture for a Wemo entity in hass.""" return await async_create_wemo_entity(hass, pywemo_device, wemo_entity_suffix) @pytest.fixture(name="wemo_dli_entity") -async def async_wemo_dli_entity_fixture(hass, pywemo_dli_device, wemo_entity_suffix): +async def async_wemo_dli_entity_fixture( + hass: HomeAssistant, pywemo_dli_device: pywemo.WeMoDevice, wemo_entity_suffix: str +) -> er.RegistryEntry | None: """Fixture for a Wemo entity in hass.""" 
return await async_create_wemo_entity(hass, pywemo_dli_device, wemo_entity_suffix) diff --git a/tests/components/wemo/entity_test_helpers.py b/tests/components/wemo/entity_test_helpers.py index 6700b00ec38..f57dffad6f9 100644 --- a/tests/components/wemo/entity_test_helpers.py +++ b/tests/components/wemo/entity_test_helpers.py @@ -4,7 +4,11 @@ This is not a test module. These test methods are used by the platform test modu """ import asyncio +from collections.abc import Callable, Coroutine import threading +from typing import Any + +import pywemo from homeassistant.components.homeassistant import DOMAIN as HA_DOMAIN from homeassistant.components.wemo.coordinator import async_get_coordinator @@ -17,6 +21,7 @@ from homeassistant.const import ( STATE_UNAVAILABLE, ) from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component @@ -40,7 +45,12 @@ def _perform_async_update(coordinator): return async_callback -async def _async_multiple_call_helper(hass, pywemo_device, call1, call2): +async def _async_multiple_call_helper( + hass: HomeAssistant, + pywemo_device: pywemo.WeMoDevice, + call1: Callable[[], Coroutine[Any, Any, None]], + call2: Callable[[], Coroutine[Any, Any, None]], +) -> None: """Create two calls (call1 & call2) in parallel; verify only one polls the device. There should only be one poll on the device at a time. Any parallel updates @@ -87,7 +97,7 @@ async def _async_multiple_call_helper(hass, pywemo_device, call1, call2): async def test_async_update_locked_callback_and_update( - hass: HomeAssistant, pywemo_device, wemo_entity + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity: er.RegistryEntry ) -> None: """Test that a callback and a state update request can't both happen at the same time. 
@@ -102,7 +112,7 @@ async def test_async_update_locked_callback_and_update( async def test_async_update_locked_multiple_updates( - hass: HomeAssistant, pywemo_device, wemo_entity + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity: er.RegistryEntry ) -> None: """Test that two hass async_update state updates do not proceed at the same time.""" coordinator = async_get_coordinator(hass, wemo_entity.device_id) @@ -112,7 +122,7 @@ async def test_async_update_locked_multiple_updates( async def test_async_update_locked_multiple_callbacks( - hass: HomeAssistant, pywemo_device, wemo_entity + hass: HomeAssistant, pywemo_device: pywemo.WeMoDevice, wemo_entity: er.RegistryEntry ) -> None: """Test that two device callback state updates do not proceed at the same time.""" coordinator = async_get_coordinator(hass, wemo_entity.device_id) @@ -158,24 +168,33 @@ class EntityTestHelpers: """Common state update helpers.""" async def test_async_update_locked_multiple_updates( - self, hass, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Test that two hass async_update state updates do not proceed at the same time.""" await test_async_update_locked_multiple_updates( hass, pywemo_device, wemo_entity ) async def test_async_update_locked_multiple_callbacks( - self, hass, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Test that two device callback state updates do not proceed at the same time.""" await test_async_update_locked_multiple_callbacks( hass, pywemo_device, wemo_entity ) async def test_async_update_locked_callback_and_update( - self, hass, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Test that a callback and a state update request can't both happen at the same time. When a state update is received via a callback from the device at the same time diff --git a/tests/components/wemo/test_binary_sensor.py b/tests/components/wemo/test_binary_sensor.py index 99a5df47e25..576283577c2 100644 --- a/tests/components/wemo/test_binary_sensor.py +++ b/tests/components/wemo/test_binary_sensor.py @@ -1,6 +1,7 @@ """Tests for the Wemo binary_sensor entity.""" import pytest +import pywemo from pywemo import StandbyState from homeassistant.components.homeassistant import ( @@ -12,6 +13,8 @@ from homeassistant.components.wemo.binary_sensor import ( MakerBinarySensor, ) from homeassistant.const import ATTR_ENTITY_ID, STATE_OFF, STATE_ON +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from .entity_test_helpers import EntityTestHelpers @@ -26,8 +29,12 @@ class TestMotion(EntityTestHelpers): return "Motion" async def test_binary_sensor_registry_state_callback( - self, hass, pywemo_registry, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_registry: pywemo.SubscriptionRegistry, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Verify that the binary_sensor receives state updates from the registry.""" # On state. 
pywemo_device.get_state.return_value = 1 @@ -42,8 +49,12 @@ class TestMotion(EntityTestHelpers): assert hass.states.get(wemo_entity.entity_id).state == STATE_OFF async def test_binary_sensor_update_entity( - self, hass, pywemo_registry, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_registry: pywemo.SubscriptionRegistry, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Verify that the binary_sensor performs state updates.""" await async_setup_component(hass, HA_DOMAIN, {}) @@ -82,8 +93,12 @@ class TestMaker(EntityTestHelpers): return MakerBinarySensor._name_suffix.lower() async def test_registry_state_callback( - self, hass, pywemo_registry, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_registry: pywemo.SubscriptionRegistry, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Verify that the binary_sensor receives state updates from the registry.""" # On state. pywemo_device.sensor_state = 0 @@ -112,8 +127,12 @@ class TestInsight(EntityTestHelpers): return InsightBinarySensor._name_suffix.lower() async def test_registry_state_callback( - self, hass, pywemo_registry, pywemo_device, wemo_entity - ): + self, + hass: HomeAssistant, + pywemo_registry: pywemo.SubscriptionRegistry, + pywemo_device: pywemo.WeMoDevice, + wemo_entity: er.RegistryEntry, + ) -> None: """Verify that the binary_sensor receives state updates from the registry.""" # On state. pywemo_device.get_state.return_value = 1 diff --git a/tests/components/wemo/test_coordinator.py b/tests/components/wemo/test_coordinator.py index 198b132bbd0..f524633e701 100644 --- a/tests/components/wemo/test_coordinator.py +++ b/tests/components/wemo/test_coordinator.py @@ -3,9 +3,10 @@ import asyncio from dataclasses import asdict from datetime import timedelta -from unittest.mock import call, patch +from unittest.mock import _Call, call, patch import pytest +import pywemo from pywemo.exceptions import ActionException, PyWeMoException from pywemo.subscribe import EVENT_TYPE_LONG_PRESS @@ -14,7 +15,7 @@ from homeassistant.components.wemo import CONF_DISCOVERY, CONF_STATIC from homeassistant.components.wemo.const import DOMAIN, WEMO_SUBSCRIPTION_EVENT from homeassistant.components.wemo.coordinator import Options, async_get_coordinator from homeassistant.core import HomeAssistant, callback -from homeassistant.helpers import device_registry as dr +from homeassistant.helpers import device_registry as dr, entity_registry as er from homeassistant.helpers.update_coordinator import UpdateFailed from homeassistant.setup import async_setup_component from homeassistant.util.dt import utcnow @@ -248,14 +249,14 @@ class TestInsight: ) async def test_should_poll( self, - hass, - subscribed, - state, - expected_calls, - wemo_entity, - pywemo_device, - pywemo_registry, - ): + hass: HomeAssistant, + subscribed: bool, + state: int, + expected_calls: list[_Call], + wemo_entity: er.RegistryEntry, + pywemo_device: pywemo.WeMoDevice, + pywemo_registry: pywemo.SubscriptionRegistry, + ) -> None: """Validate the should_poll returns the correct value.""" pywemo_registry.is_subscribed.return_value = subscribed pywemo_device.get_state.reset_mock() diff --git a/tests/components/wemo/test_device_trigger.py b/tests/components/wemo/test_device_trigger.py index 47b704dae5d..477f5ee3960 100644 --- a/tests/components/wemo/test_device_trigger.py +++ b/tests/components/wemo/test_device_trigger.py @@ -16,6 +16,7 @@ from homeassistant.const import ( Platform, ) from 
homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er from homeassistant.setup import async_setup_component from tests.common import async_get_device_automations, async_mock_service @@ -29,7 +30,9 @@ def pywemo_model(): return "LightSwitchLongPress" -async def setup_automation(hass, device_id, trigger_type): +async def setup_automation( + hass: HomeAssistant, device_id: str, trigger_type: str +) -> None: """Set up an automation trigger for testing triggering.""" return await async_setup_component( hass, @@ -96,7 +99,9 @@ async def test_get_triggers(hass: HomeAssistant, wemo_entity) -> None: assert triggers == unordered(expected_triggers) -async def test_fires_on_long_press(hass: HomeAssistant, wemo_entity) -> None: +async def test_fires_on_long_press( + hass: HomeAssistant, wemo_entity: er.RegistryEntry +) -> None: """Test wemo long press trigger firing.""" assert await setup_automation(hass, wemo_entity.device_id, EVENT_TYPE_LONG_PRESS) calls = async_mock_service(hass, "test", "automation") diff --git a/tests/components/wemo/test_sensor.py b/tests/components/wemo/test_sensor.py index 7e0c8fa72f0..2259bfbbf18 100644 --- a/tests/components/wemo/test_sensor.py +++ b/tests/components/wemo/test_sensor.py @@ -2,6 +2,9 @@ import pytest +from homeassistant.core import HomeAssistant +from homeassistant.helpers import entity_registry as er + from .conftest import MOCK_INSIGHT_CURRENT_WATTS, MOCK_INSIGHT_TODAY_KWH from .entity_test_helpers import EntityTestHelpers @@ -24,7 +27,7 @@ class InsightTestTemplate(EntityTestHelpers): """Select the appropriate entity for the test.""" return cls.ENTITY_ID_SUFFIX - def test_state(self, hass, wemo_entity): + def test_state(self, hass: HomeAssistant, wemo_entity: er.RegistryEntry) -> None: """Test the sensor state.""" assert hass.states.get(wemo_entity.entity_id).state == self.EXPECTED_STATE_VALUE diff --git a/tests/components/whirlpool/test_config_flow.py b/tests/components/whirlpool/test_config_flow.py index e3896a436d4..1240e1303e1 100644 --- a/tests/components/whirlpool/test_config_flow.py +++ b/tests/components/whirlpool/test_config_flow.py @@ -235,15 +235,7 @@ async def test_reauth_flow(hass: HomeAssistant, region, brand) -> None: ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM @@ -294,21 +286,7 @@ async def test_reauth_flow_auth_error(hass: HomeAssistant, region, brand) -> Non ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data={ - CONF_USERNAME: "test-username", - CONF_PASSWORD: "new-password", - "region": region[0], - "brand": brand[0], - }, - ) - + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -345,15 +323,7 @@ async def test_reauth_flow_connnection_error( ) mock_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": 
mock_entry.unique_id, - "entry_id": mock_entry.entry_id, - }, - data=CONFIG_INPUT | {"region": region[0], "brand": brand[0]}, - ) + result = await mock_entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM diff --git a/tests/components/whois/conftest.py b/tests/components/whois/conftest.py index 1c779cce671..4bb18581c1a 100644 --- a/tests/components/whois/conftest.py +++ b/tests/components/whois/conftest.py @@ -4,6 +4,7 @@ from __future__ import annotations from collections.abc import Generator from datetime import datetime +from typing import Any from unittest.mock import AsyncMock, MagicMock, Mock, patch import pytest @@ -74,7 +75,7 @@ def mock_whois_missing_some_attrs() -> Generator[Mock]: class LimitedWhoisMock: """A limited mock of whois_query.""" - def __init__(self, *args, **kwargs): + def __init__(self, *args: Any, **kwargs: Any) -> None: """Mock only attributes the library always sets being available.""" self.creation_date = datetime(2019, 1, 1, 0, 0, 0) self.dnssec = True diff --git a/tests/components/withings/snapshots/test_diagnostics.ambr b/tests/components/withings/snapshots/test_diagnostics.ambr index df2a3b95388..f7c704a2c49 100644 --- a/tests/components/withings/snapshots/test_diagnostics.ambr +++ b/tests/components/withings/snapshots/test_diagnostics.ambr @@ -15,7 +15,7 @@ 'extracellular_water', 'intracellular_water', 'visceral_fat', - 'unknown', + 'basal_metabolic_rate', 'fat_ratio', 'height', 'temperature', @@ -78,7 +78,7 @@ 'extracellular_water', 'intracellular_water', 'visceral_fat', - 'unknown', + 'basal_metabolic_rate', 'fat_ratio', 'height', 'temperature', @@ -141,7 +141,7 @@ 'extracellular_water', 'intracellular_water', 'visceral_fat', - 'unknown', + 'basal_metabolic_rate', 'fat_ratio', 'height', 'temperature', diff --git a/tests/components/withings/test_config_flow.py b/tests/components/withings/test_config_flow.py index 20bef90a31e..39c8340a78e 100644 --- a/tests/components/withings/test_config_flow.py +++ b/tests/components/withings/test_config_flow.py @@ -5,7 +5,7 @@ from unittest.mock import AsyncMock, patch import pytest from homeassistant.components.withings.const import DOMAIN -from homeassistant.config_entries import SOURCE_REAUTH, SOURCE_USER +from homeassistant.config_entries import SOURCE_USER from homeassistant.core import HomeAssistant from homeassistant.data_entry_flow import FlowResultType from homeassistant.helpers import config_entry_oauth2_flow @@ -145,14 +145,7 @@ async def test_config_reauth_profile( """Test reauth an existing profile reauthenticates the config entry.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" @@ -207,14 +200,7 @@ async def test_config_reauth_wrong_account( """Test reauth with wrong account.""" await setup_integration(hass, polling_config_entry) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": SOURCE_REAUTH, - "entry_id": polling_config_entry.entry_id, - }, - data=polling_config_entry.data, - ) + result = await polling_config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git 
a/tests/components/wiz/__init__.py b/tests/components/wiz/__init__.py index e80a1ed8249..d84074e37d3 100644 --- a/tests/components/wiz/__init__.py +++ b/tests/components/wiz/__init__.py @@ -1,9 +1,10 @@ """Tests for the WiZ Platform integration.""" -from collections.abc import Callable -from contextlib import contextmanager +from collections.abc import Callable, Generator +from contextlib import _GeneratorContextManager, contextmanager from copy import deepcopy import json +from typing import Any from unittest.mock import AsyncMock, MagicMock, patch from pywizlight import SCENES, BulbType, PilotParser, wizlight @@ -194,7 +195,11 @@ async def setup_integration(hass: HomeAssistant) -> MockConfigEntry: return entry -def _mocked_wizlight(device, extended_white_range, bulb_type) -> wizlight: +def _mocked_wizlight( + device: dict[str, Any] | None, + extended_white_range: list[int] | None, + bulb_type: BulbType | None, +) -> wizlight: bulb = MagicMock(auto_spec=wizlight, name="Mocked wizlight") async def _save_setup_callback(callback: Callable) -> None: @@ -228,9 +233,13 @@ def _mocked_wizlight(device, extended_white_range, bulb_type) -> wizlight: return bulb -def _patch_wizlight(device=None, extended_white_range=None, bulb_type=None): +def _patch_wizlight( + device: dict[str, Any] | None = None, + extended_white_range: list[int] | None = None, + bulb_type: BulbType | None = None, +) -> _GeneratorContextManager: @contextmanager - def _patcher(): + def _patcher() -> Generator[None]: bulb = device or _mocked_wizlight(device, extended_white_range, bulb_type) with ( patch("homeassistant.components.wiz.wizlight", return_value=bulb), @@ -244,9 +253,9 @@ def _patch_wizlight(device=None, extended_white_range=None, bulb_type=None): return _patcher() -def _patch_discovery(): +def _patch_discovery() -> _GeneratorContextManager[None]: @contextmanager - def _patcher(): + def _patcher() -> Generator[None]: with patch( "homeassistant.components.wiz.discovery.find_wizlights", return_value=[DiscoveredBulb(FAKE_IP, FAKE_MAC)], @@ -257,8 +266,12 @@ def _patch_discovery(): async def async_setup_integration( - hass, wizlight=None, device=None, extended_white_range=None, bulb_type=None -): + hass: HomeAssistant, + wizlight: wizlight | None = None, + device: dict[str, Any] | None = None, + extended_white_range: list[int] | None = None, + bulb_type: BulbType | None = None, +) -> tuple[wizlight, MockConfigEntry]: """Set up the integration with a mock device.""" entry = MockConfigEntry( domain=DOMAIN, @@ -273,7 +286,9 @@ async def async_setup_integration( return bulb, entry -async def async_push_update(hass, device, params): +async def async_push_update( + hass: HomeAssistant, device: wizlight, params: dict[str, Any] +) -> None: """Push an update to the device.""" device.state = PilotParser(params) device.status = params.get("state") diff --git a/tests/components/ws66i/test_media_player.py b/tests/components/ws66i/test_media_player.py index a66e79bf9e0..23f64d7d514 100644 --- a/tests/components/ws66i/test_media_player.py +++ b/tests/components/ws66i/test_media_player.py @@ -1,6 +1,7 @@ """The tests for WS66i Media player platform.""" from collections import defaultdict +from typing import Any from unittest.mock import patch from freezegun.api import FrozenDateTimeFactory @@ -73,7 +74,7 @@ class AttrDict(dict): class MockWs66i: """Mock for pyws66i object.""" - def __init__(self, fail_open=False, fail_zone_check=None): + def __init__(self, fail_open=False, fail_zone_check=None) -> None: """Init mock object.""" self.zones = 
defaultdict( lambda: AttrDict( @@ -170,7 +171,9 @@ async def _setup_ws66i_with_options(hass: HomeAssistant, ws66i) -> MockConfigEnt return config_entry -async def _call_media_player_service(hass, name, data): +async def _call_media_player_service( + hass: HomeAssistant, name: str, data: dict[str, Any] +) -> None: await hass.services.async_call( MEDIA_PLAYER_DOMAIN, name, service_data=data, blocking=True ) diff --git a/tests/components/wyoming/conftest.py b/tests/components/wyoming/conftest.py index f6093e34261..770186d92aa 100644 --- a/tests/components/wyoming/conftest.py +++ b/tests/components/wyoming/conftest.py @@ -19,9 +19,8 @@ from tests.common import MockConfigEntry @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir @pytest.fixture(autouse=True) diff --git a/tests/components/wyoming/snapshots/test_config_flow.ambr b/tests/components/wyoming/snapshots/test_config_flow.ambr index a0e0c7c5011..ee4c5533254 100644 --- a/tests/components/wyoming/snapshots/test_config_flow.ambr +++ b/tests/components/wyoming/snapshots/test_config_flow.ambr @@ -1,42 +1,4 @@ # serializer version: 1 -# name: test_hassio_addon_discovery - FlowResultSnapshot({ - 'context': dict({ - 'source': 'hassio', - 'unique_id': '1234', - }), - 'data': dict({ - 'host': 'mock-piper', - 'port': 10200, - }), - 'description': None, - 'description_placeholders': None, - 'flow_id': <ANY>, - 'handler': 'wyoming', - 'options': dict({ - }), - 'result': ConfigEntrySnapshot({ - 'data': dict({ - 'host': 'mock-piper', - 'port': 10200, - }), - 'disabled_by': None, - 'domain': 'wyoming', - 'entry_id': <ANY>, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'hassio', - 'title': 'Piper', - 'unique_id': '1234', - 'version': 1, - }), - 'title': 'Piper', - 'type': <FlowResultType.CREATE_ENTRY: 'create_entry'>, - 'version': 1, - }) -# --- # name: test_hassio_addon_discovery[info0] FlowResultSnapshot({ 'context': dict({ diff --git a/tests/components/wyoming/snapshots/test_tts.ambr b/tests/components/wyoming/snapshots/test_tts.ambr index 299bddb07e5..7ca5204e66c 100644 --- a/tests/components/wyoming/snapshots/test_tts.ambr +++ b/tests/components/wyoming/snapshots/test_tts.ambr @@ -32,28 +32,6 @@ }), ]) # --- -# name: test_get_tts_audio_mp3 - list([ - dict({ - 'data': dict({ - 'text': 'Hello world', - }), - 'payload': None, - 'type': 'synthesize', - }), - ]) -# --- -# name: test_get_tts_audio_raw - list([ - dict({ - 'data': dict({ - 'text': 'Hello world', - }), - 'payload': None, - 'type': 'synthesize', - }), - ]) -# --- # name: test_voice_speaker list([ dict({ diff --git a/tests/components/xiaomi_ble/conftest.py b/tests/components/xiaomi_ble/conftest.py index 8994aec813c..d4864cbe2f8 100644 --- a/tests/components/xiaomi_ble/conftest.py +++ b/tests/components/xiaomi_ble/conftest.py @@ -19,7 +19,7 @@ class MockBleakClient: services = MockServices() - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: """Mock BleakClient.""" async def __aenter__(self, *args, **kwargs): diff --git a/tests/components/xiaomi_ble/test_config_flow.py b/tests/components/xiaomi_ble/test_config_flow.py index b61615e0f79..f690665608b 100644 --- a/tests/components/xiaomi_ble/test_config_flow.py +++ b/tests/components/xiaomi_ble/test_config_flow.py @@ -1083,16 +1083,7 @@ async def test_async_step_reauth_abort_early(hass: HomeAssistant) -> None: device = 
DeviceData() - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "entry_id": entry.entry_id, - "title_placeholders": {"name": entry.title}, - "unique_id": entry.unique_id, - }, - data=entry.data | {"device": device}, - ) + result = await entry.start_reauth_flow(hass, data={"device": device}) assert result["type"] is FlowResultType.ABORT assert result["reason"] == "reauth_successful" diff --git a/tests/components/xiaomi_miio/test_config_flow.py b/tests/components/xiaomi_miio/test_config_flow.py index 481be189ddd..146526c69a5 100644 --- a/tests/components/xiaomi_miio/test_config_flow.py +++ b/tests/components/xiaomi_miio/test_config_flow.py @@ -704,7 +704,7 @@ async def test_config_flow_step_device_manual_model_succes(hass: HomeAssistant) } -async def config_flow_device_success(hass, model_to_test): +async def config_flow_device_success(hass: HomeAssistant, model_to_test: str) -> None: """Test a successful config flow for a device (base class).""" result = await hass.config_entries.flow.async_init( const.DOMAIN, context={"source": config_entries.SOURCE_USER} @@ -748,7 +748,7 @@ async def config_flow_device_success(hass, model_to_test): } -async def config_flow_generic_roborock(hass): +async def config_flow_generic_roborock(hass: HomeAssistant) -> None: """Test a successful config flow for a generic roborock vacuum.""" dummy_model = "roborock.vacuum.dummy" @@ -794,7 +794,9 @@ async def config_flow_generic_roborock(hass): } -async def zeroconf_device_success(hass, zeroconf_name_to_test, model_to_test): +async def zeroconf_device_success( + hass: HomeAssistant, zeroconf_name_to_test: str, model_to_test: str +) -> None: """Test a successful zeroconf discovery of a device (base class).""" result = await hass.config_entries.flow.async_init( const.DOMAIN, @@ -974,11 +976,7 @@ async def test_reauth(hass: HomeAssistant) -> None: assert await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() - result = await hass.config_entries.flow.async_init( - const.DOMAIN, - context={"source": config_entries.SOURCE_REAUTH}, - data=config_entry.data, - ) + result = await config_entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_confirm" diff --git a/tests/components/xiaomi_miio/test_select.py b/tests/components/xiaomi_miio/test_select.py index f2f04127d75..584ef910c98 100644 --- a/tests/components/xiaomi_miio/test_select.py +++ b/tests/components/xiaomi_miio/test_select.py @@ -141,7 +141,7 @@ async def test_select_coordinator_update(hass: HomeAssistant, setup_test) -> Non assert state.state == "left" -async def setup_component(hass, entity_name): +async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up component.""" entity_id = f"{DOMAIN}.{entity_name}" diff --git a/tests/components/xiaomi_miio/test_vacuum.py b/tests/components/xiaomi_miio/test_vacuum.py index 54646d30513..64612f6f464 100644 --- a/tests/components/xiaomi_miio/test_vacuum.py +++ b/tests/components/xiaomi_miio/test_vacuum.py @@ -533,7 +533,7 @@ async def test_xiaomi_vacuum_fanspeeds( assert "Fan speed step not recognized" in caplog.text -async def setup_component(hass, entity_name): +async def setup_component(hass: HomeAssistant, entity_name: str) -> str: """Set up vacuum component.""" entity_id = f"{DOMAIN}.{entity_name}" diff --git a/tests/components/yale/__init__.py b/tests/components/yale/__init__.py new file mode 100644 index 00000000000..7f72d348042 --- 
/dev/null +++ b/tests/components/yale/__init__.py @@ -0,0 +1 @@ +"""Tests for the yale component.""" diff --git a/tests/components/yale/conftest.py b/tests/components/yale/conftest.py new file mode 100644 index 00000000000..3e633430846 --- /dev/null +++ b/tests/components/yale/conftest.py @@ -0,0 +1,72 @@ +"""Yale tests conftest.""" + +from unittest.mock import patch + +import pytest +from yalexs.manager.ratelimit import _RateLimitChecker + +from homeassistant.components.yale.const import DOMAIN +from homeassistant.core import HomeAssistant + +from .mocks import mock_client_credentials, mock_config_entry + +from tests.common import MockConfigEntry, load_fixture + + +@pytest.fixture(name="mock_discovery", autouse=True) +def mock_discovery_fixture(): + """Mock discovery to avoid loading the whole bluetooth stack.""" + with patch( + "homeassistant.components.yale.data.discovery_flow.async_create_flow" + ) as mock_discovery: + yield mock_discovery + + +@pytest.fixture(name="disable_ratelimit_checks", autouse=True) +def disable_ratelimit_checks_fixture(): + """Disable rate limit checks.""" + with patch.object(_RateLimitChecker, "register_wakeup"): + yield + + +@pytest.fixture(name="mock_config_entry") +def mock_config_entry_fixture(jwt: str) -> MockConfigEntry: + """Return the default mocked config entry.""" + return mock_config_entry(jwt=jwt) + + +@pytest.fixture(name="jwt") +def load_jwt_fixture() -> str: + """Load Fixture data.""" + return load_fixture("jwt", DOMAIN).strip("\n") + + +@pytest.fixture(name="reauth_jwt") +def load_reauth_jwt_fixture() -> str: + """Load Fixture data.""" + return load_fixture("reauth_jwt", DOMAIN).strip("\n") + + +@pytest.fixture(name="reauth_jwt_wrong_account") +def load_reauth_jwt_wrong_account_fixture() -> str: + """Load Fixture data.""" + return load_fixture("reauth_jwt_wrong_account", DOMAIN).strip("\n") + + +@pytest.fixture(name="client_credentials", autouse=True) +async def mock_client_credentials_fixture(hass: HomeAssistant) -> None: + """Mock client credentials.""" + await mock_client_credentials(hass) + + +@pytest.fixture(name="skip_cloud", autouse=True) +def skip_cloud_fixture(): + """Skip setting up cloud. + + Cloud already has its own tests for account link. + + We do not need to test it here as we only need to test our + usage of the oauth2 helpers. 
+ """ + with patch("homeassistant.components.cloud.async_setup", return_value=True): + yield diff --git a/tests/components/yale/fixtures/get_activity.bridge_offline.json b/tests/components/yale/fixtures/get_activity.bridge_offline.json new file mode 100644 index 00000000000..9c2ded96665 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.bridge_offline.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "associated_bridge_offline", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.bridge_online.json b/tests/components/yale/fixtures/get_activity.bridge_online.json new file mode 100644 index 00000000000..6f8b5e6a4a6 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.bridge_online.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "associated_bridge_online", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.doorbell_motion.json b/tests/components/yale/fixtures/get_activity.doorbell_motion.json new file mode 100644 index 00000000000..cf0f231a49a --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.doorbell_motion.json @@ -0,0 +1,58 @@ +[ + { + "otherUser": { + "FirstName": "Unknown", + "UserName": "deleteduser", + "LastName": "User", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "dateTime": 1582663119959, + "deviceID": "K98GiDT45GUL", + "info": { + "videoUploadProgress": "in_progress", + "image": { + "resource_type": "image", + "etag": "fdsf", + "created_at": "2020-02-25T20:38:39Z", + "type": "upload", + "format": "jpg", + "version": 1582663119, + "secure_url": "https://res.cloudinary.com/updated_image.jpg", + "signature": "fdfdfd", + "url": "http://res.cloudinary.com/updated_image.jpg", + "bytes": 48545, + "placeholder": false, + "original_filename": "file", + "width": 720, + "tags": [], + "public_id": "xnsj5gphpzij9brifpf4", + "height": 576 + }, + "dvrID": "dvr", + "videoAvailable": false, + "hasSubscription": false + }, + "callingUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "house": { + "houseName": "K98GiDT45GUL", + "houseID": "na" + }, + "action": "doorbell_motion_detected", + "deviceType": "doorbell", + 
"entities": { + "otherUser": "deleted", + "house": "na", + "device": "K98GiDT45GUL", + "activity": "de5585cfd4eae900bb5ba3dc", + "callingUser": "deleted" + }, + "deviceName": "Front Door" + } +] diff --git a/tests/components/yale/fixtures/get_activity.jammed.json b/tests/components/yale/fixtures/get_activity.jammed.json new file mode 100644 index 00000000000..782a13f9c73 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.jammed.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "jammed", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock.json b/tests/components/yale/fixtures/get_activity.lock.json new file mode 100644 index 00000000000..b40e7d61ccf --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json b/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json new file mode 100644 index 00000000000..38c26ffb7dd --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_autorelock.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "Relock", + "UserID": "automaticrelock", + "FirstName": "Auto" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json b/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json new file mode 100644 index 00000000000..bfbc621e064 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_bluetooth.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": 
"123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_keypad.json b/tests/components/yale/fixtures/get_activity.lock_from_keypad.json new file mode 100644 index 00000000000..1b1e13e67dd --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_keypad.json @@ -0,0 +1,37 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.lock_from_manual.json b/tests/components/yale/fixtures/get_activity.lock_from_manual.json new file mode 100644 index 00000000000..e2fc195cfda --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.lock_from_manual.json @@ -0,0 +1,39 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "lock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": false, + "manual": true, + "tag": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.locking.json b/tests/components/yale/fixtures/get_activity.locking.json new file mode 100644 index 00000000000..ad2df6f7e91 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.locking.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "locking", + "dateTime": 
1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.unlock_from_manual.json b/tests/components/yale/fixtures/get_activity.unlock_from_manual.json new file mode 100644 index 00000000000..e8bf95818ce --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.unlock_from_manual.json @@ -0,0 +1,39 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "unlock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": false, + "manual": true, + "tag": false, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.unlock_from_tag.json b/tests/components/yale/fixtures/get_activity.unlock_from_tag.json new file mode 100644 index 00000000000..57876428677 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.unlock_from_tag.json @@ -0,0 +1,39 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "unlock", + "dateTime": 1582007218000, + "info": { + "remote": false, + "keypad": false, + "manual": false, + "tag": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_activity.unlocking.json b/tests/components/yale/fixtures/get_activity.unlocking.json new file mode 100644 index 00000000000..0fbd0be3eb8 --- /dev/null +++ b/tests/components/yale/fixtures/get_activity.unlocking.json @@ -0,0 +1,36 @@ +[ + { + "entities": { + "activity": "mockActivity2", + "house": "123", + "device": "online_with_doorsense", + "callingUser": "mockUserId2", + "otherUser": "deleted" + }, + "callingUser": { + "LastName": "elven princess", + "UserID": "mockUserId2", + "FirstName": "Your favorite" + }, + "otherUser": { + "LastName": "User", + "UserName": "deleteduser", + "FirstName": "Unknown", + "UserID": "deleted", + "PhoneNo": "deleted" + }, + "deviceType": "lock", + "deviceName": "MockHouseTDoor", + "action": "unlocking", + "dateTime": 1582007218000, + "info": { + "remote": true, + "DateLogActionID": "ABC+Time" + }, + "deviceID": "online_with_doorsense", + "house": { + "houseName": "MockHouse", + "houseID": "123" + } + } +] diff --git a/tests/components/yale/fixtures/get_doorbell.json b/tests/components/yale/fixtures/get_doorbell.json new file mode 100644 index 00000000000..32714211618 --- /dev/null +++ 
b/tests/components/yale/fixtures/get_doorbell.json @@ -0,0 +1,81 @@ +{ + "status_timestamp": 1512811834532, + "appID": "august-iphone", + "LockID": "BBBB1F5F11114C24CCCC97571DD6AAAA", + "recentImage": { + "original_filename": "file", + "placeholder": false, + "bytes": 24476, + "height": 640, + "format": "jpg", + "width": 480, + "version": 1512892814, + "resource_type": "image", + "etag": "54966926be2e93f77d498a55f247661f", + "tags": [], + "public_id": "qqqqt4ctmxwsysylaaaa", + "url": "http://image.com/vmk16naaaa7ibuey7sar.jpg", + "created_at": "2017-12-10T08:01:35Z", + "signature": "75z47ca21b5e8ffda21d2134e478a2307c4625da", + "secure_url": "https://image.com/vmk16naaaa7ibuey7sar.jpg", + "type": "upload" + }, + "settings": { + "keepEncoderRunning": true, + "videoResolution": "640x480", + "minACNoScaling": 40, + "irConfiguration": 8448272, + "directLink": true, + "overlayEnabled": true, + "notify_when_offline": true, + "micVolume": 100, + "bitrateCeiling": 512000, + "initialBitrate": 384000, + "IVAEnabled": false, + "turnOffCamera": false, + "ringSoundEnabled": true, + "JPGQuality": 70, + "motion_notifications": true, + "speakerVolume": 92, + "buttonpush_notifications": true, + "ABREnabled": true, + "debug": false, + "batteryLowThreshold": 3.1, + "batteryRun": false, + "IREnabled": true, + "batteryUseThreshold": 3.4 + }, + "doorbellServerURL": "https://doorbells.august.com", + "name": "Front Door", + "createdAt": "2016-11-26T22:27:11.176Z", + "installDate": "2016-11-26T22:27:11.176Z", + "serialNumber": "tBXZR0Z35E", + "dvrSubscriptionSetupDone": true, + "caps": ["reconnect"], + "doorbellID": "K98GiDT45GUL", + "HouseID": "mockhouseid1", + "telemetry": { + "signal_level": -56, + "date": "2017-12-10 08:05:12", + "battery_soc": 96, + "battery": 4.061763, + "steady_ac_in": 22.196405, + "BSSID": "88:ee:00:dd:aa:11", + "SSID": "foo_ssid", + "updated_at": "2017-12-10T08:05:13.650Z", + "temperature": 28.25, + "wifi_freq": 5745, + "load_average": "0.50 0.47 0.35 1/154 9345", + "link_quality": 54, + "battery_soh": 95, + "uptime": "16168.75 13830.49", + "ip_addr": "10.0.1.11", + "doorbell_low_battery": false, + "ac_in": 23.856874 + }, + "installUserID": "c3b2a94e-373e-aaaa-bbbb-36e996827777", + "status": "doorbell_call_status_online", + "firmwareVersion": "2.3.0-RC153+201711151527", + "pubsubChannel": "7c7a6672-59c8-3333-ffff-dcd98705cccc", + "updatedAt": "2017-12-10T08:05:13.650Z" +} diff --git a/tests/components/yale/fixtures/get_doorbell.nobattery.json b/tests/components/yale/fixtures/get_doorbell.nobattery.json new file mode 100644 index 00000000000..2a7f1e2d3b2 --- /dev/null +++ b/tests/components/yale/fixtures/get_doorbell.nobattery.json @@ -0,0 +1,78 @@ +{ + "status_timestamp": 1512811834532, + "appID": "august-iphone", + "LockID": "BBBB1F5F11114C24CCCC97571DD6AAAA", + "recentImage": { + "original_filename": "file", + "placeholder": false, + "bytes": 24476, + "height": 640, + "format": "jpg", + "width": 480, + "version": 1512892814, + "resource_type": "image", + "etag": "54966926be2e93f77d498a55f247661f", + "tags": [], + "public_id": "qqqqt4ctmxwsysylaaaa", + "url": "http://image.com/vmk16naaaa7ibuey7sar.jpg", + "created_at": "2017-12-10T08:01:35Z", + "signature": "75z47ca21b5e8ffda21d2134e478a2307c4625da", + "secure_url": "https://image.com/vmk16naaaa7ibuey7sar.jpg", + "type": "upload" + }, + "settings": { + "keepEncoderRunning": true, + "videoResolution": "640x480", + "minACNoScaling": 40, + "irConfiguration": 8448272, + "directLink": true, + "overlayEnabled": true, + "notify_when_offline": 
true, + "micVolume": 100, + "bitrateCeiling": 512000, + "initialBitrate": 384000, + "IVAEnabled": false, + "turnOffCamera": false, + "ringSoundEnabled": true, + "JPGQuality": 70, + "motion_notifications": true, + "speakerVolume": 92, + "buttonpush_notifications": true, + "ABREnabled": true, + "debug": false, + "batteryLowThreshold": 3.1, + "batteryRun": false, + "IREnabled": true, + "batteryUseThreshold": 3.4 + }, + "doorbellServerURL": "https://doorbells.august.com", + "name": "Front Door", + "createdAt": "2016-11-26T22:27:11.176Z", + "installDate": "2016-11-26T22:27:11.176Z", + "serialNumber": "tBXZR0Z35E", + "dvrSubscriptionSetupDone": true, + "caps": ["reconnect"], + "doorbellID": "K98GiDT45GUL", + "HouseID": "3dd2accaea08", + "telemetry": { + "signal_level": -56, + "date": "2017-12-10 08:05:12", + "steady_ac_in": 22.196405, + "BSSID": "88:ee:00:dd:aa:11", + "SSID": "foo_ssid", + "updated_at": "2017-12-10T08:05:13.650Z", + "temperature": 28.25, + "wifi_freq": 5745, + "load_average": "0.50 0.47 0.35 1/154 9345", + "link_quality": 54, + "uptime": "16168.75 13830.49", + "ip_addr": "10.0.1.11", + "doorbell_low_battery": false, + "ac_in": 23.856874 + }, + "installUserID": "c3b2a94e-373e-aaaa-bbbb-36e996827777", + "status": "doorbell_call_status_online", + "firmwareVersion": "2.3.0-RC153+201711151527", + "pubsubChannel": "7c7a6672-59c8-3333-ffff-dcd98705cccc", + "updatedAt": "2017-12-10T08:05:13.650Z" +} diff --git a/tests/components/yale/fixtures/get_doorbell.offline.json b/tests/components/yale/fixtures/get_doorbell.offline.json new file mode 100644 index 00000000000..13a8483c995 --- /dev/null +++ b/tests/components/yale/fixtures/get_doorbell.offline.json @@ -0,0 +1,126 @@ +{ + "recentImage": { + "tags": [], + "height": 576, + "public_id": "fdsfds", + "bytes": 50013, + "resource_type": "image", + "original_filename": "file", + "version": 1582242766, + "format": "jpg", + "signature": "fdsfdsf", + "created_at": "2020-02-20T23:52:46Z", + "type": "upload", + "placeholder": false, + "url": "http://res.cloudinary.com/august-com/image/upload/ccc/ccccc.jpg", + "secure_url": "https://res.cloudinary.com/august-com/image/upload/cc/cccc.jpg", + "etag": "zds", + "width": 720 + }, + "firmwareVersion": "3.1.0-HYDRC75+201909251139", + "doorbellServerURL": "https://doorbells.august.com", + "installUserID": "mock", + "caps": ["reconnect", "webrtc", "tcp_wakeup"], + "messagingProtocol": "pubnub", + "createdAt": "2020-02-12T03:52:28.719Z", + "invitations": [], + "appID": "august-iphone-v5", + "HouseID": "houseid1", + "doorbellID": "tmt100", + "name": "Front Door", + "settings": { + "batteryUseThreshold": 3.4, + "brightness": 50, + "batteryChargeCurrent": 60, + "overCurrentThreshold": -250, + "irLedBrightness": 40, + "videoResolution": "720x576", + "pirPulseCounter": 1, + "contrast": 50, + "micVolume": 50, + "directLink": true, + "auto_contrast_mode": 0, + "saturation": 50, + "motion_notifications": true, + "pirSensitivity": 20, + "pirBlindTime": 7, + "notify_when_offline": false, + "nightModeAlsThreshold": 10, + "minACNoScaling": 40, + "DVRRecordingTimeout": 15, + "turnOffCamera": false, + "debug": false, + "keepEncoderRunning": true, + "pirWindowTime": 0, + "bitrateCeiling": 2000000, + "backlight_comp": false, + "buttonpush_notifications": true, + "buttonpush_notifications_partners": false, + "minimumSnapshotInterval": 30, + "pirConfiguration": 272, + "batteryLowThreshold": 3.1, + "sharpness": 50, + "ABREnabled": true, + "hue": 50, + "initialBitrate": 1000000, + "ringSoundEnabled": true, + "IVAEnabled": 
false, + "overlayEnabled": true, + "speakerVolume": 92, + "ringRepetitions": 3, + "powerProfilePreset": -1, + "irConfiguration": 16836880, + "JPGQuality": 70, + "IREnabled": true + }, + "updatedAt": "2020-02-20T23:58:21.580Z", + "serialNumber": "abc", + "installDate": "2019-02-12T03:52:28.719Z", + "dvrSubscriptionSetupDone": true, + "pubsubChannel": "mock", + "chimes": [ + { + "updatedAt": "2020-02-12T03:55:38.805Z", + "_id": "cccc", + "type": 1, + "serialNumber": "ccccc", + "doorbellID": "tmt100", + "name": "Living Room", + "chimeID": "cccc", + "createdAt": "2020-02-12T03:55:38.805Z", + "firmware": "3.1.16" + } + ], + "telemetry": { + "battery": 3.985, + "battery_soc": 81, + "load_average": "0.45 0.18 0.07 4/98 831", + "ip_addr": "192.168.100.174", + "BSSID": "snp", + "uptime": "96.55 70.59", + "SSID": "bob", + "updated_at": "2020-02-20T23:53:09.586Z", + "dtim_period": 0, + "wifi_freq": 2462, + "date": "2020-02-20 11:47:36", + "BSSIDManufacturer": "Ubiquiti - Ubiquiti Networks Inc.", + "battery_temp": 22, + "battery_avg_cur": -291, + "beacon_interval": 0, + "signal_level": -49, + "battery_soh": 95, + "doorbell_low_battery": false + }, + "secChipCertSerial": "", + "tcpKeepAlive": { + "keepAliveUUID": "mock", + "wakeUp": { + "token": "wakemeup", + "lastUpdated": 1582242723931 + } + }, + "statusUpdatedAtMs": 1582243101579, + "status": "doorbell_offline", + "type": "hydra1", + "HouseName": "housename" +} diff --git a/tests/components/yale/fixtures/get_lock.doorsense_init.json b/tests/components/yale/fixtures/get_lock.doorsense_init.json new file mode 100644 index 00000000000..1132cc61a8d --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.doorsense_init.json @@ -0,0 +1,92 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "init", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": false, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + 
"format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.low_keypad_battery.json b/tests/components/yale/fixtures/get_lock.low_keypad_battery.json new file mode 100644 index 00000000000..43b5513a527 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.low_keypad_battery.json @@ -0,0 +1,92 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Low", + "batteryRaw": 128 + }, + "OfflineKeys": { + "created": [], + "loaded": [], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.offline.json b/tests/components/yale/fixtures/get_lock.offline.json new file mode 100644 index 00000000000..50d3d345ef8 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.offline.json @@ -0,0 +1,57 @@ +{ + "Calibrated": false, + "Created": "2000-00-00T00:00:00.447Z", + "HouseID": "houseid", + "HouseName": "MockName", + "LockID": "ABC", + "LockName": "Test", + "LockStatus": { + "status": "unknown" + }, + "OfflineKeys": { + "created": [], + "createdhk": [ + { + "UserID": "mock-user-id", + "created": "2000-00-00T00:00:00.447Z", + "key": "mockkey", 
+ "slot": 12 + } + ], + "deleted": [], + "loaded": [] + }, + "SerialNumber": "ABC", + "Type": 3, + "Updated": "2000-00-00T00:00:00.447Z", + "battery": -1, + "cameras": [], + "currentFirmwareVersion": "undefined-1.59.0-1.13.2", + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minGPSAccuracyRequired": 80, + "minimumGeofence": 100 + } + }, + "homeKitEnabled": false, + "isGalileo": false, + "macAddress": "a:b:c", + "parametersToSet": {}, + "pubsubChannel": "mockpubsub", + "ruleHash": {}, + "skuNumber": "AUG-X", + "supportsEntryCodes": false, + "users": { + "mockuserid": { + "FirstName": "MockName", + "LastName": "House", + "UserType": "superuser", + "identifiers": ["phone:+15558675309", "email:mockme@mock.org"] + } + }, + "zWaveDSK": "1-2-3-4", + "zWaveEnabled": true +} diff --git a/tests/components/yale/fixtures/get_lock.online.json b/tests/components/yale/fixtures/get_lock.online.json new file mode 100644 index 00000000000..7abadeef4b6 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online.json @@ -0,0 +1,92 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.online.unknown_state.json b/tests/components/yale/fixtures/get_lock.online.unknown_state.json new file mode 100644 index 00000000000..abc8b40a132 
--- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online.unknown_state.json @@ -0,0 +1,59 @@ +{ + "LockName": "Side Door", + "Type": 1001, + "Created": "2019-10-07T01:49:06.831Z", + "Updated": "2019-10-07T01:49:06.831Z", + "LockID": "BROKENID", + "HouseID": "abc", + "HouseName": "dog", + "Calibrated": false, + "timeZone": "America/Chicago", + "battery": 0.9524716174964851, + "hostLockInfo": { + "serialNumber": "YR", + "manufacturer": "yale", + "productID": 1536, + "productTypeID": 32770 + }, + "supportsEntryCodes": true, + "skuNumber": "AUG-MD01", + "macAddress": "MAC", + "SerialNumber": "M1FXZ00EZ9", + "LockStatus": { + "status": "unknown_error_during_connect", + "dateTime": "2020-02-22T02:48:11.741Z", + "isLockStatusChanged": true, + "valid": true, + "doorState": "closed" + }, + "currentFirmwareVersion": "undefined-4.3.0-1.8.14", + "homeKitEnabled": true, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "id", + "mfgBridgeID": "id", + "deviceModel": "august-connect", + "firmwareVersion": "2.2.1", + "operative": true, + "status": { + "current": "online", + "updated": "2020-02-21T15:06:47.001Z", + "lastOnline": "2020-02-21T15:06:47.001Z", + "lastOffline": "2020-02-06T17:33:21.265Z" + }, + "hyperBridge": true + }, + "parametersToSet": {}, + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json b/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json new file mode 100644 index 00000000000..84822df9b89 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_missing_doorsense.json @@ -0,0 +1,50 @@ +{ + "Bridge": { + "_id": "bridgeid", + "deviceModel": "august-connect", + "firmwareVersion": "2.2.1", + "hyperBridge": true, + "mfgBridgeID": "C5WY200WSH", + "operative": true, + "status": { + "current": "online", + "lastOffline": "2000-00-00T00:00:00.447Z", + "lastOnline": "2000-00-00T00:00:00.447Z", + "updated": "2000-00-00T00:00:00.447Z" + } + }, + "Calibrated": false, + "Created": "2000-00-00T00:00:00.447Z", + "HouseID": "123", + "HouseName": "Test", + "LockID": "missing_doorsense_id", + "LockName": "Online door missing doorsense", + "LockStatus": { + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": false, + "status": "locked", + "valid": true + }, + "SerialNumber": "XY", + "Type": 1001, + "Updated": "2000-00-00T00:00:00.447Z", + "battery": 0.922, + "currentFirmwareVersion": "undefined-4.3.0-1.8.14", + "homeKitEnabled": true, + "hostLockInfo": { + "manufacturer": "yale", + "productID": 1536, + "productTypeID": 32770, + "serialNumber": "ABC" + }, + "isGalileo": false, + "macAddress": "12:22", + "pins": { + "created": [], + "loaded": [] + }, + "skuNumber": "AUG-MD01", + "supportsEntryCodes": true, + "timeZone": "Pacific/Hawaii", + "zWaveEnabled": false +} diff --git a/tests/components/yale/fixtures/get_lock.online_with_doorsense.json b/tests/components/yale/fixtures/get_lock.online_with_doorsense.json new file mode 100644 index 00000000000..d9b413708ca --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_with_doorsense.json @@ -0,0 +1,52 @@ +{ + "Bridge": { + "_id": "bridgeid", + "deviceModel": "august-connect", + "firmwareVersion": "2.2.1", + "hyperBridge": true, + "mfgBridgeID": "C5WY200WSH", + "operative": true, + "status": { + "current": "online", + "lastOffline": 
"2000-00-00T00:00:00.447Z", + "lastOnline": "2000-00-00T00:00:00.447Z", + "updated": "2000-00-00T00:00:00.447Z" + } + }, + "pubsubChannel": "pubsub", + "Calibrated": false, + "Created": "2000-00-00T00:00:00.447Z", + "HouseID": "mockhouseid1", + "HouseName": "Test", + "LockID": "online_with_doorsense", + "LockName": "Online door with doorsense", + "LockStatus": { + "dateTime": "2017-12-10T04:48:30.272Z", + "doorState": "open", + "isLockStatusChanged": false, + "status": "locked", + "valid": true + }, + "SerialNumber": "XY", + "Type": 1001, + "Updated": "2000-00-00T00:00:00.447Z", + "battery": 0.922, + "currentFirmwareVersion": "undefined-4.3.0-1.8.14", + "homeKitEnabled": true, + "hostLockInfo": { + "manufacturer": "yale", + "productID": 1536, + "productTypeID": 32770, + "serialNumber": "ABC" + }, + "isGalileo": false, + "macAddress": "12:22", + "pins": { + "created": [], + "loaded": [] + }, + "skuNumber": "AUG-MD01", + "supportsEntryCodes": true, + "timeZone": "Pacific/Hawaii", + "zWaveEnabled": false +} diff --git a/tests/components/yale/fixtures/get_lock.online_with_keys.json b/tests/components/yale/fixtures/get_lock.online_with_keys.json new file mode 100644 index 00000000000..4efcba44d09 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_with_keys.json @@ -0,0 +1,100 @@ +{ + "LockName": "Front Door Lock", + "Type": 2, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8064", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a9", + "serialNumber": "K1GXB0054L", + "lockID": "92412D1B44004595B5DEB134E151A8D4", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [ + { + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "slot": 1, + "key": "kkk01d4300c1dcxxx1c330f794941111", + "created": "2017-12-10T03:12:09.215Z", + "loaded": "2017-12-10T03:12:54.391Z" + } + ], + "deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + 
"cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/get_lock.online_with_unlatch.json b/tests/components/yale/fixtures/get_lock.online_with_unlatch.json new file mode 100644 index 00000000000..288ab1a2f28 --- /dev/null +++ b/tests/components/yale/fixtures/get_lock.online_with_unlatch.json @@ -0,0 +1,94 @@ +{ + "LockName": "Lock online with unlatch supported", + "Type": 17, + "Created": "2024-03-14T18:03:09.003Z", + "Updated": "2024-03-14T18:03:09.003Z", + "LockID": "online_with_unlatch", + "HouseID": "mockhouseid1", + "HouseName": "Zuhause", + "Calibrated": false, + "timeZone": "Europe/Berlin", + "battery": 0.61, + "batteryInfo": { + "level": 0.61, + "warningState": "lock_state_battery_warning_none", + "infoUpdatedDate": "2024-04-30T17:55:09.045Z", + "lastChangeDate": "2024-03-15T07:04:00.000Z", + "lastChangeVoltage": 8350, + "state": "Mittel", + "icon": "https://app-resources.aaecosystem.com/images/lock_battery_state_medium.png" + }, + "hostHardwareID": "xxx", + "supportsEntryCodes": true, + "remoteOperateSecret": "xxxx", + "skuNumber": "NONE", + "macAddress": "DE:AD:BE:00:00:00", + "SerialNumber": "LPOC000000", + "LockStatus": { + "status": "locked", + "dateTime": "2024-04-30T18:41:25.673Z", + "isLockStatusChanged": false, + "valid": true, + "doorState": "init" + }, + "currentFirmwareVersion": "1.0.4", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "65f33445529187c78a100000", + "mfgBridgeID": "LPOCH0004Y", + "deviceModel": "august-lock", + "firmwareVersion": "1.0.4", + "operative": true, + "status": { + "current": "online", + "lastOnline": "2024-04-30T18:41:27.971Z", + "updated": "2024-04-30T18:41:27.971Z", + "lastOffline": "2024-04-25T14:41:40.118Z" + }, + "locks": [ + { + "_id": "656858c182e6c7c555faf758", + "LockID": "68895DD075A1444FAD4C00B273EEEF28", + "macAddress": "DE:AD:BE:EF:0B:BC" + } + ], + "hyperBridge": true + }, + "OfflineKeys": { + "created": [], + "loaded": [ + { + "created": "2024-03-14T18:03:09.034Z", + "key": "055281d4aa9bd7b68c7b7bb78e2f34ca", + "slot": 1, + "UserID": "b4b44424-0000-0000-0000-25c224dad337", + "loaded": "2024-03-14T18:03:33.470Z" + } + ], + "deleted": [] + }, + "parametersToSet": {}, + "users": { + "b4b44424-0000-0000-0000-25c224dad337": { + "UserType": "superuser", + "FirstName": "m10x", + "LastName": "m10x", + "identifiers": ["phone:+494444444", "email:m10x@example.com"] + } + }, + "pubsubChannel": "pubsub", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + }, + "accessSchedulesAllowed": true +} diff --git a/tests/components/yale/fixtures/get_locks.json b/tests/components/yale/fixtures/get_locks.json new file mode 100644 index 00000000000..3fab55f82c9 --- /dev/null +++ b/tests/components/yale/fixtures/get_locks.json @@ -0,0 +1,16 @@ +{ + "A6697750D607098BAE8D6BAA11EF8063": { + "LockName": "Front Door Lock", + "UserType": "superuser", + "macAddress": "2E:BA:C4:14:3F:09", + "HouseID": "000000000000", + "HouseName": "A House" + }, + "A6697750D607098BAE8D6BAA11EF9999": { + "LockName": "Back Door Lock", + "UserType": "user", + "macAddress": "2E:BA:C4:14:3F:88", + "HouseID": "000000000011", + "HouseName": "A House" + } +} diff --git 
a/tests/components/yale/fixtures/jwt b/tests/components/yale/fixtures/jwt new file mode 100644 index 00000000000..d64f31b9bb2 --- /dev/null +++ b/tests/components/yale/fixtures/jwt @@ -0,0 +1 @@ +eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6ImE3NmMyNWU1LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjE3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.BdRo-dEr-osbDQGB2XzlI-mIj4gqULtapODt-sj-eA8 diff --git a/tests/components/yale/fixtures/lock_open.json b/tests/components/yale/fixtures/lock_open.json new file mode 100644 index 00000000000..b6cfe3c90fc --- /dev/null +++ b/tests/components/yale/fixtures/lock_open.json @@ -0,0 +1,26 @@ +{ + "status": "kAugLockState_Locked", + "resultsFromOperationCache": false, + "retryCount": 1, + "info": { + "wlanRSSI": -54, + "lockType": "lock_version_1001", + "lockStatusChanged": false, + "serialNumber": "ABC", + "serial": "123", + "action": "lock", + "context": { + "startDate": "2020-02-19T01:59:39.516Z", + "retryCount": 1, + "transactionID": "mock" + }, + "bridgeID": "mock", + "wlanSNR": 41, + "startTime": "2020-02-19T01:59:39.517Z", + "duration": 5149, + "lockID": "ABC", + "rssi": -77 + }, + "totalTime": 5162, + "doorState": "kAugDoorState_Open" +} diff --git a/tests/components/yale/fixtures/lock_with_doorbell.online.json b/tests/components/yale/fixtures/lock_with_doorbell.online.json new file mode 100644 index 00000000000..bb2367d1111 --- /dev/null +++ b/tests/components/yale/fixtures/lock_with_doorbell.online.json @@ -0,0 +1,100 @@ +{ + "LockName": "Front Door Lock", + "Type": 7, + "Created": "2017-12-10T03:12:09.210Z", + "Updated": "2017-12-10T03:12:09.210Z", + "LockID": "A6697750D607098BAE8D6BAA11EF8063", + "HouseID": "000000000000", + "HouseName": "My House", + "Calibrated": false, + "skuNumber": "AUG-SL02-M02-S02", + "timeZone": "America/Vancouver", + "battery": 0.88, + "SerialNumber": "X2FSW05DGA", + "LockStatus": { + "status": "locked", + "doorState": "closed", + "dateTime": "2017-12-10T04:48:30.272Z", + "isLockStatusChanged": true, + "valid": true + }, + "currentFirmwareVersion": "109717e9-3.0.44-3.0.30", + "homeKitEnabled": false, + "zWaveEnabled": false, + "isGalileo": false, + "Bridge": { + "_id": "aaacab87f7efxa0015884999", + "mfgBridgeID": "AAGPP102XX", + "deviceModel": "august-doorbell", + "firmwareVersion": "2.3.0-RC153+201711151527", + "operative": true + }, + "keypad": { + "_id": "5bc65c24e6ef2a263e1450a8", + "serialNumber": "K1GXB0054Z", + "lockID": "92412D1B44004595B5DEB134E151A8D3", + "currentFirmwareVersion": "2.27.0", + "battery": {}, + "batteryLevel": "Medium", + "batteryRaw": 170 + }, + "OfflineKeys": { + "created": [], + "loaded": [ + { + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "slot": 1, + "key": "kkk01d4300c1dcxxx1c330f794941111", + "created": "2017-12-10T03:12:09.215Z", + "loaded": "2017-12-10T03:12:54.391Z" + } + ], + 
"deleted": [], + "loadedhk": [ + { + "key": "kkk01d4300c1dcxxx1c330f794941222", + "slot": 256, + "UserID": "cccca94e-373e-aaaa-bbbb-333396827777", + "created": "2017-12-10T03:12:09.218Z", + "loaded": "2017-12-10T03:12:55.563Z" + } + ] + }, + "parametersToSet": {}, + "users": { + "cccca94e-373e-aaaa-bbbb-333396827777": { + "UserType": "superuser", + "FirstName": "Foo", + "LastName": "Bar", + "identifiers": ["email:foo@bar.com", "phone:+177777777777"], + "imageInfo": { + "original": { + "width": 948, + "height": 949, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + }, + "thumbnail": { + "width": 128, + "height": 128, + "format": "jpg", + "url": "http://www.image.com/foo.jpeg", + "secure_url": "https://www.image.com/foo.jpeg" + } + } + } + }, + "pubsubChannel": "3333a674-ffff-aaaa-b351-b3a4473f3333", + "ruleHash": {}, + "cameras": [], + "geofenceLimits": { + "ios": { + "debounceInterval": 90, + "gpsAccuracyMultiplier": 2.5, + "maximumGeofence": 5000, + "minimumGeofence": 100, + "minGPSAccuracyRequired": 80 + } + } +} diff --git a/tests/components/yale/fixtures/reauth_jwt b/tests/components/yale/fixtures/reauth_jwt new file mode 100644 index 00000000000..4db8d061b68 --- /dev/null +++ b/tests/components/yale/fixtures/reauth_jwt @@ -0,0 +1 @@ +eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6ImE3NmMyNWU1LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjI3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.DtkHscsvbTE-SyKW3RxwXFQIKMf0xJwfPZN1X3JesqA diff --git a/tests/components/yale/fixtures/reauth_jwt_wrong_account b/tests/components/yale/fixtures/reauth_jwt_wrong_account new file mode 100644 index 00000000000..b0b62438178 --- /dev/null +++ b/tests/components/yale/fixtures/reauth_jwt_wrong_account @@ -0,0 +1 @@ +eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJpbnN0YWxsSWQiOiIiLCJyZWdpb24iOiJpcmVsYW5kLXByb2QtYXdzIiwiYXBwbGljYXRpb25JZCI6IiIsInVzZXJJZCI6IjQ0NDQ0NDQ0LTQ5YWEtNGMxNC1jZDBjLTQ4YTY5MzFlMjA4MSIsInZJbnN0YWxsSWQiOmZhbHNlLCJ2UGFzc3dvcmQiOnRydWUsInZFbWFpbCI6dHJ1ZSwidlBob25lIjp0cnVlLCJoYXNJbnN0YWxsSWQiOmZhbHNlLCJoYXNQYXNzd29yZCI6ZmFsc2UsImhhc0VtYWlsIjpmYWxzZSwiaGFzUGhvbmUiOmZhbHNlLCJpc0xvY2tlZE91dCI6ZmFsc2UsImNhcHRjaGEiOiIiLCJlbWFpbCI6W10sInBob25lIjpbXSwiZXhwaXJlc0F0IjoiMjAyNC0xMi0xOFQxMzo1NDowNS4xMzRaIiwidGVtcG9yYXJ5QWNjb3VudENyZWF0aW9uUGFzc3dvcmRMaW5rIjoiIiwiaWF0IjoxNzI0MTYyMDQ1LCJleHAiOjE3MzQ1MzAwNDUsIm9hdXRoIjp7ImFwcF9uYW1lIjoiSG9tZSBBc3Npc3RhbnQiLCJjbGllbnRfaWQiOiJiM2NkM2YwYi1mYjk3LTRkNmMtYmVlOS1hZjdhYjA0NzU4YzciLCJyZWRpcmVjdF91cmkiOiJodHRwczovL2FjY291bnQtbGluay5uYWJ1Y2FzYS5jb20vYXV0aG9yaXplX2NhbGxiYWNrIiwicGFydG5lcl9pZCI6IjY1Nzk3NDg4MTA2NmNhNDhjOTljMDgyNiJ9fQ.PenDp4JUIBQZEx2BFxaCqV1-6yMuUPtmnB6jq1wpoX8 diff --git a/tests/components/yale/fixtures/unlock_closed.json b/tests/components/yale/fixtures/unlock_closed.json new file mode 100644 index 
00000000000..f676c005a17 --- /dev/null +++ b/tests/components/yale/fixtures/unlock_closed.json @@ -0,0 +1,26 @@ +{ + "status": "kAugLockState_Unlocked", + "resultsFromOperationCache": false, + "retryCount": 1, + "info": { + "wlanRSSI": -54, + "lockType": "lock_version_1001", + "lockStatusChanged": false, + "serialNumber": "ABC", + "serial": "123", + "action": "lock", + "context": { + "startDate": "2020-02-19T01:59:39.516Z", + "retryCount": 1, + "transactionID": "mock" + }, + "bridgeID": "mock", + "wlanSNR": 41, + "startTime": "2020-02-19T01:59:39.517Z", + "duration": 5149, + "lockID": "ABC", + "rssi": -77 + }, + "totalTime": 5162, + "doorState": "kAugDoorState_Closed" +} diff --git a/tests/components/yale/mocks.py b/tests/components/yale/mocks.py new file mode 100644 index 00000000000..03ab3609002 --- /dev/null +++ b/tests/components/yale/mocks.py @@ -0,0 +1,515 @@ +"""Mocks for the yale component.""" + +from __future__ import annotations + +from collections.abc import Iterable +from contextlib import contextmanager +import json +import os +import time +from typing import Any +from unittest.mock import AsyncMock, MagicMock, PropertyMock, patch + +from yalexs.activity import ( + ACTIVITY_ACTIONS_BRIDGE_OPERATION, + ACTIVITY_ACTIONS_DOOR_OPERATION, + ACTIVITY_ACTIONS_DOORBELL_DING, + ACTIVITY_ACTIONS_DOORBELL_MOTION, + ACTIVITY_ACTIONS_DOORBELL_VIEW, + ACTIVITY_ACTIONS_LOCK_OPERATION, + SOURCE_LOCK_OPERATE, + SOURCE_LOG, + Activity, + BridgeOperationActivity, + DoorbellDingActivity, + DoorbellMotionActivity, + DoorbellViewActivity, + DoorOperationActivity, + LockOperationActivity, +) +from yalexs.api_async import ApiAsync +from yalexs.authenticator_common import Authentication, AuthenticationState +from yalexs.const import Brand +from yalexs.doorbell import Doorbell, DoorbellDetail +from yalexs.lock import Lock, LockDetail +from yalexs.manager.ratelimit import _RateLimitChecker +from yalexs.manager.socketio import SocketIORunner + +from homeassistant.components.application_credentials import ( + ClientCredential, + async_import_client_credential, +) +from homeassistant.components.yale.const import DOMAIN +from homeassistant.config_entries import ConfigEntry +from homeassistant.core import HomeAssistant +from homeassistant.setup import async_setup_component + +from tests.common import MockConfigEntry, load_fixture + +USER_ID = "a76c25e5-49aa-4c14-cd0c-48a6931e2081" + + +def _mock_get_config( + brand: Brand = Brand.YALE_GLOBAL, jwt: str | None = None +) -> dict[str, Any]: + """Return a default yale config.""" + return { + DOMAIN: { + "auth_implementation": "yale", + "token": { + "access_token": jwt or "access_token", + "expires_in": 1, + "refresh_token": "refresh_token", + "expires_at": time.time() + 3600, + "service": "yale", + }, + } + } + + +def _mock_authenticator(auth_state: AuthenticationState) -> Authentication: + """Mock an yale authenticator.""" + authenticator = MagicMock() + type(authenticator).state = PropertyMock(return_value=auth_state) + return authenticator + + +def _timetoken() -> str: + return str(time.time_ns())[:-2] + + +async def mock_yale_config_entry( + hass: HomeAssistant, +) -> MockConfigEntry: + """Mock yale config entry and client credentials.""" + entry = mock_config_entry() + entry.add_to_hass(hass) + return entry + + +def mock_config_entry(jwt: str | None = None) -> MockConfigEntry: + """Return the default mocked config entry.""" + return MockConfigEntry( + domain=DOMAIN, + data=_mock_get_config(jwt=jwt)[DOMAIN], + options={}, + unique_id=USER_ID, + ) + + +async 
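# Editor's sketch (annotation, not a line added by this diff): the entry helpers
# defined just above are typically combined in a test roughly as follows. This
# mirrors mock_yale_config_entry() but feeds the "jwt" fixture in as the access
# token, using the same load_fixture pattern as _load_json_fixture() later in this
# file. The helper name itself is hypothetical.
async def _example_setup_entry_with_jwt(hass: HomeAssistant) -> MockConfigEntry:
    """Create and register a mocked config entry whose token is the jwt fixture."""
    jwt = await hass.async_add_executor_job(load_fixture, "yale/jwt")
    entry = mock_config_entry(jwt=jwt)  # MockConfigEntry carrying the fixture token
    entry.add_to_hass(hass)  # register it so async_setup can find the entry
    return entry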
def mock_client_credentials(hass: HomeAssistant) -> ClientCredential: + """Mock client credentials.""" + assert await async_setup_component(hass, "application_credentials", {}) + await async_import_client_credential( + hass, + DOMAIN, + ClientCredential("1", "2"), + DOMAIN, + ) + + +@contextmanager +def patch_yale_setup(): + """Patch yale setup process.""" + with ( + patch("yalexs.manager.gateway.ApiAsync") as api_mock, + patch.object(_RateLimitChecker, "register_wakeup") as authenticate_mock, + patch("yalexs.manager.data.SocketIORunner") as socketio_mock, + patch.object(socketio_mock, "run"), + patch( + "homeassistant.components.yale.config_entry_oauth2_flow.async_get_config_entry_implementation" + ), + ): + yield api_mock, authenticate_mock, socketio_mock + + +async def _mock_setup_yale( + hass: HomeAssistant, + api_instance: ApiAsync, + socketio_mock: SocketIORunner, + authenticate_side_effect: MagicMock, +) -> ConfigEntry: + """Set up yale integration.""" + entry = await mock_yale_config_entry(hass) + with patch_yale_setup() as patched_setup: + api_mock, authenticate_mock, sockio_mock_ = patched_setup + authenticate_mock.side_effect = authenticate_side_effect + sockio_mock_.return_value = socketio_mock + api_mock.return_value = api_instance + await hass.config_entries.async_setup(entry.entry_id) + await hass.async_block_till_done() + return entry + + +async def _create_yale_with_devices( + hass: HomeAssistant, + devices: Iterable[LockDetail | DoorbellDetail] | None = None, + api_call_side_effects: dict[str, Any] | None = None, + activities: list[Any] | None = None, + brand: Brand = Brand.YALE_GLOBAL, + authenticate_side_effect: MagicMock | None = None, +) -> tuple[ConfigEntry, SocketIORunner]: + entry, _, socketio = await _create_yale_api_with_devices( + hass, + devices, + api_call_side_effects, + activities, + brand, + authenticate_side_effect, + ) + return entry, socketio + + +async def _create_yale_api_with_devices( + hass: HomeAssistant, + devices: Iterable[LockDetail | DoorbellDetail] | None = None, + api_call_side_effects: dict[str, Any] | None = None, + activities: dict[str, Any] | None = None, + brand: Brand = Brand.YALE_GLOBAL, + authenticate_side_effect: MagicMock | None = None, +) -> tuple[ConfigEntry, ApiAsync, SocketIORunner]: + if api_call_side_effects is None: + api_call_side_effects = {} + if devices is None: + devices = () + + update_api_call_side_effects(api_call_side_effects, devices, activities) + + api_instance = await make_mock_api(api_call_side_effects, brand) + socketio = SocketIORunner( + MagicMock( + api=api_instance, async_get_access_token=AsyncMock(return_value="token") + ) + ) + socketio.run = AsyncMock() + + entry = await _mock_setup_yale( + hass, + api_instance, + socketio, + authenticate_side_effect=authenticate_side_effect, + ) + + return entry, api_instance, socketio + + +def update_api_call_side_effects( + api_call_side_effects: dict[str, Any], + devices: Iterable[LockDetail | DoorbellDetail], + activities: dict[str, Any] | None = None, +) -> None: + """Update side effects dict from devices and activities.""" + + device_data = {"doorbells": [], "locks": []} + for device in devices or (): + if isinstance(device, LockDetail): + device_data["locks"].append( + {"base": _mock_yale_lock(device.device_id), "detail": device} + ) + elif isinstance(device, DoorbellDetail): + device_data["doorbells"].append( + { + "base": _mock_yale_doorbell( + deviceid=device.device_id, + brand=device._data.get("brand", Brand.YALE_GLOBAL), + ), + "detail": device, + } + ) + 
else: + raise ValueError # noqa: TRY004 + + def _get_device_detail(device_type, device_id): + for device in device_data[device_type]: + if device["detail"].device_id == device_id: + return device["detail"] + raise ValueError + + def _get_base_devices(device_type): + return [device["base"] for device in device_data[device_type]] + + def get_lock_detail_side_effect(access_token, device_id): + return _get_device_detail("locks", device_id) + + def get_doorbell_detail_side_effect(access_token, device_id): + return _get_device_detail("doorbells", device_id) + + def get_operable_locks_side_effect(access_token): + return _get_base_devices("locks") + + def get_doorbells_side_effect(access_token): + return _get_base_devices("doorbells") + + def get_house_activities_side_effect(access_token, house_id, limit=10): + if activities is not None: + return activities + return [] + + def lock_return_activities_side_effect(access_token, device_id): + lock = _get_device_detail("locks", device_id) + return [ + # There is a check to prevent out of order events + # so we set the doorclosed & lock event in the future + # to prevent a race condition where we reject the event + # because it happened before the dooropen & unlock event. + _mock_lock_operation_activity(lock, "lock", 2000), + _mock_door_operation_activity(lock, "doorclosed", 2000), + ] + + def unlock_return_activities_side_effect(access_token, device_id): + lock = _get_device_detail("locks", device_id) + return [ + _mock_lock_operation_activity(lock, "unlock", 0), + _mock_door_operation_activity(lock, "dooropen", 0), + ] + + api_call_side_effects.setdefault("get_lock_detail", get_lock_detail_side_effect) + api_call_side_effects.setdefault( + "get_doorbell_detail", get_doorbell_detail_side_effect + ) + api_call_side_effects.setdefault( + "get_operable_locks", get_operable_locks_side_effect + ) + api_call_side_effects.setdefault("get_doorbells", get_doorbells_side_effect) + api_call_side_effects.setdefault( + "get_house_activities", get_house_activities_side_effect + ) + api_call_side_effects.setdefault( + "lock_return_activities", lock_return_activities_side_effect + ) + api_call_side_effects.setdefault( + "unlock_return_activities", unlock_return_activities_side_effect + ) + api_call_side_effects.setdefault( + "async_unlatch_return_activities", unlock_return_activities_side_effect + ) + + +async def make_mock_api( + api_call_side_effects: dict[str, Any], + brand: Brand = Brand.YALE_GLOBAL, +) -> ApiAsync: + """Make a mock ApiAsync instance.""" + api_instance = MagicMock(name="Api", brand=brand) + + if api_call_side_effects["get_lock_detail"]: + type(api_instance).async_get_lock_detail = AsyncMock( + side_effect=api_call_side_effects["get_lock_detail"] + ) + + if api_call_side_effects["get_operable_locks"]: + type(api_instance).async_get_operable_locks = AsyncMock( + side_effect=api_call_side_effects["get_operable_locks"] + ) + + if api_call_side_effects["get_doorbells"]: + type(api_instance).async_get_doorbells = AsyncMock( + side_effect=api_call_side_effects["get_doorbells"] + ) + + if api_call_side_effects["get_doorbell_detail"]: + type(api_instance).async_get_doorbell_detail = AsyncMock( + side_effect=api_call_side_effects["get_doorbell_detail"] + ) + + if api_call_side_effects["get_house_activities"]: + type(api_instance).async_get_house_activities = AsyncMock( + side_effect=api_call_side_effects["get_house_activities"] + ) + + if api_call_side_effects["lock_return_activities"]: + type(api_instance).async_lock_return_activities = AsyncMock( + 
side_effect=api_call_side_effects["lock_return_activities"] + ) + + if api_call_side_effects["unlock_return_activities"]: + type(api_instance).async_unlock_return_activities = AsyncMock( + side_effect=api_call_side_effects["unlock_return_activities"] + ) + + if api_call_side_effects["async_unlatch_return_activities"]: + type(api_instance).async_unlatch_return_activities = AsyncMock( + side_effect=api_call_side_effects["async_unlatch_return_activities"] + ) + + api_instance.async_unlock_async = AsyncMock() + api_instance.async_lock_async = AsyncMock() + api_instance.async_status_async = AsyncMock() + api_instance.async_get_user = AsyncMock(return_value={"UserID": "abc"}) + api_instance.async_unlatch_async = AsyncMock() + api_instance.async_unlatch = AsyncMock() + api_instance.async_add_websocket_subscription = AsyncMock() + + return api_instance + + +def _mock_yale_authentication( + token_text: str, token_timestamp: float, state: AuthenticationState +) -> Authentication: + authentication = MagicMock(name="yalexs.authentication") + type(authentication).state = PropertyMock(return_value=state) + type(authentication).access_token = PropertyMock(return_value=token_text) + type(authentication).access_token_expires = PropertyMock( + return_value=token_timestamp + ) + return authentication + + +def _mock_yale_lock(lockid: str = "mocklockid1", houseid: str = "mockhouseid1") -> Lock: + return Lock(lockid, _mock_yale_lock_data(lockid=lockid, houseid=houseid)) + + +def _mock_yale_doorbell( + deviceid="mockdeviceid1", houseid="mockhouseid1", brand=Brand.YALE_GLOBAL +) -> Doorbell: + return Doorbell( + deviceid, + _mock_yale_doorbell_data(deviceid=deviceid, houseid=houseid, brand=brand), + ) + + +def _mock_yale_doorbell_data( + deviceid: str = "mockdeviceid1", + houseid: str = "mockhouseid1", + brand: Brand = Brand.YALE_GLOBAL, +) -> dict[str, Any]: + return { + "_id": deviceid, + "DeviceID": deviceid, + "name": f"{deviceid} Name", + "HouseID": houseid, + "UserType": "owner", + "serialNumber": "mockserial", + "battery": 90, + "status": "standby", + "currentFirmwareVersion": "mockfirmware", + "Bridge": { + "_id": "bridgeid1", + "firmwareVersion": "mockfirm", + "operative": True, + }, + "LockStatus": {"doorState": "open"}, + } + + +def _mock_yale_lock_data( + lockid: str = "mocklockid1", houseid: str = "mockhouseid1" +) -> dict[str, Any]: + return { + "_id": lockid, + "LockID": lockid, + "LockName": f"{lockid} Name", + "HouseID": houseid, + "UserType": "owner", + "SerialNumber": "mockserial", + "battery": 90, + "currentFirmwareVersion": "mockfirmware", + "Bridge": { + "_id": "bridgeid1", + "firmwareVersion": "mockfirm", + "operative": True, + }, + "LockStatus": {"doorState": "open"}, + } + + +async def _mock_operative_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online.json") + + +async def _mock_lock_with_offline_key(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_with_keys.json") + + +async def _mock_inoperative_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.offline.json") + + +async def _mock_activities_from_fixture( + hass: HomeAssistant, path: str +) -> list[Activity]: + json_dict = await _load_json_fixture(hass, path) + activities = [] + for activity_json in json_dict: + activity = _activity_from_dict(activity_json) + if activity: + activities.append(activity) + + return activities + + +async def _mock_lock_from_fixture(hass: 
HomeAssistant, path: str) -> LockDetail: + json_dict = await _load_json_fixture(hass, path) + return LockDetail(json_dict) + + +async def _mock_doorbell_from_fixture(hass: HomeAssistant, path: str) -> LockDetail: + json_dict = await _load_json_fixture(hass, path) + return DoorbellDetail(json_dict) + + +async def _load_json_fixture(hass: HomeAssistant, path: str) -> dict[str, Any]: + fixture = await hass.async_add_executor_job( + load_fixture, os.path.join("yale", path) + ) + return json.loads(fixture) + + +async def _mock_doorsense_enabled_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_with_doorsense.json") + + +async def _mock_doorsense_missing_yale_lock_detail(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_missing_doorsense.json") + + +async def _mock_lock_with_unlatch(hass: HomeAssistant) -> LockDetail: + return await _mock_lock_from_fixture(hass, "get_lock.online_with_unlatch.json") + + +def _mock_lock_operation_activity( + lock: Lock, action: str, offset: float +) -> LockOperationActivity: + return LockOperationActivity( + SOURCE_LOCK_OPERATE, + { + "dateTime": (time.time() + offset) * 1000, + "deviceID": lock.device_id, + "deviceType": "lock", + "action": action, + }, + ) + + +def _mock_door_operation_activity( + lock: Lock, action: str, offset: float +) -> DoorOperationActivity: + return DoorOperationActivity( + SOURCE_LOCK_OPERATE, + { + "dateTime": (time.time() + offset) * 1000, + "deviceID": lock.device_id, + "deviceType": "lock", + "action": action, + }, + ) + + +def _activity_from_dict(activity_dict: dict[str, Any]) -> Activity | None: + action = activity_dict.get("action") + + activity_dict["dateTime"] = time.time() * 1000 + + if action in ACTIVITY_ACTIONS_DOORBELL_DING: + return DoorbellDingActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_DOORBELL_MOTION: + return DoorbellMotionActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_DOORBELL_VIEW: + return DoorbellViewActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_LOCK_OPERATION: + return LockOperationActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_DOOR_OPERATION: + return DoorOperationActivity(SOURCE_LOG, activity_dict) + if action in ACTIVITY_ACTIONS_BRIDGE_OPERATION: + return BridgeOperationActivity(SOURCE_LOG, activity_dict) + return None diff --git a/tests/components/yale/snapshots/test_binary_sensor.ambr b/tests/components/yale/snapshots/test_binary_sensor.ambr new file mode 100644 index 00000000000..e294cb7c76c --- /dev/null +++ b/tests/components/yale/snapshots/test_binary_sensor.ambr @@ -0,0 +1,33 @@ +# serializer version: 1 +# name: test_doorbell_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'tmt100_name', + 'config_entries': , + 'configuration_url': 'https://account.aaecosystem.com', + 'connections': set({ + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'yale', + 'tmt100', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Yale Home Inc.', + 'model': 'hydra1', + 'model_id': None, + 'name': 'tmt100 Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'tmt100 Name', + 'sw_version': '3.1.0-HYDRC75+201909251139', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/yale/snapshots/test_diagnostics.ambr b/tests/components/yale/snapshots/test_diagnostics.ambr new 
file mode 100644 index 00000000000..c3d8d8e2aaa --- /dev/null +++ b/tests/components/yale/snapshots/test_diagnostics.ambr @@ -0,0 +1,125 @@ +# serializer version: 1 +# name: test_diagnostics + dict({ + 'brand': 'yale_global', + 'doorbells': dict({ + 'K98GiDT45GUL': dict({ + 'HouseID': '**REDACTED**', + 'LockID': 'BBBB1F5F11114C24CCCC97571DD6AAAA', + 'appID': 'august-iphone', + 'caps': list([ + 'reconnect', + ]), + 'createdAt': '2016-11-26T22:27:11.176Z', + 'doorbellID': 'K98GiDT45GUL', + 'doorbellServerURL': 'https://doorbells.august.com', + 'dvrSubscriptionSetupDone': True, + 'firmwareVersion': '2.3.0-RC153+201711151527', + 'installDate': '2016-11-26T22:27:11.176Z', + 'installUserID': '**REDACTED**', + 'name': 'Front Door', + 'pubsubChannel': '**REDACTED**', + 'recentImage': '**REDACTED**', + 'serialNumber': 'tBXZR0Z35E', + 'settings': dict({ + 'ABREnabled': True, + 'IREnabled': True, + 'IVAEnabled': False, + 'JPGQuality': 70, + 'batteryLowThreshold': 3.1, + 'batteryRun': False, + 'batteryUseThreshold': 3.4, + 'bitrateCeiling': 512000, + 'buttonpush_notifications': True, + 'debug': False, + 'directLink': True, + 'initialBitrate': 384000, + 'irConfiguration': 8448272, + 'keepEncoderRunning': True, + 'micVolume': 100, + 'minACNoScaling': 40, + 'motion_notifications': True, + 'notify_when_offline': True, + 'overlayEnabled': True, + 'ringSoundEnabled': True, + 'speakerVolume': 92, + 'turnOffCamera': False, + 'videoResolution': '640x480', + }), + 'status': 'doorbell_call_status_online', + 'status_timestamp': 1512811834532, + 'telemetry': dict({ + 'BSSID': '88:ee:00:dd:aa:11', + 'SSID': 'foo_ssid', + 'ac_in': 23.856874, + 'battery': 4.061763, + 'battery_soc': 96, + 'battery_soh': 95, + 'date': '2017-12-10 08:05:12', + 'doorbell_low_battery': False, + 'ip_addr': '10.0.1.11', + 'link_quality': 54, + 'load_average': '0.50 0.47 0.35 1/154 9345', + 'signal_level': -56, + 'steady_ac_in': 22.196405, + 'temperature': 28.25, + 'updated_at': '2017-12-10T08:05:13.650Z', + 'uptime': '16168.75 13830.49', + 'wifi_freq': 5745, + }), + 'updatedAt': '2017-12-10T08:05:13.650Z', + }), + }), + 'locks': dict({ + 'online_with_doorsense': dict({ + 'Bridge': dict({ + '_id': 'bridgeid', + 'deviceModel': 'august-connect', + 'firmwareVersion': '2.2.1', + 'hyperBridge': True, + 'mfgBridgeID': 'C5WY200WSH', + 'operative': True, + 'status': dict({ + 'current': 'online', + 'lastOffline': '2000-00-00T00:00:00.447Z', + 'lastOnline': '2000-00-00T00:00:00.447Z', + 'updated': '2000-00-00T00:00:00.447Z', + }), + }), + 'Calibrated': False, + 'Created': '2000-00-00T00:00:00.447Z', + 'HouseID': '**REDACTED**', + 'HouseName': 'Test', + 'LockID': 'online_with_doorsense', + 'LockName': 'Online door with doorsense', + 'LockStatus': dict({ + 'dateTime': '2017-12-10T04:48:30.272Z', + 'doorState': 'open', + 'isLockStatusChanged': False, + 'status': 'locked', + 'valid': True, + }), + 'SerialNumber': 'XY', + 'Type': 1001, + 'Updated': '2000-00-00T00:00:00.447Z', + 'battery': 0.922, + 'currentFirmwareVersion': 'undefined-4.3.0-1.8.14', + 'homeKitEnabled': True, + 'hostLockInfo': dict({ + 'manufacturer': 'yale', + 'productID': 1536, + 'productTypeID': 32770, + 'serialNumber': 'ABC', + }), + 'isGalileo': False, + 'macAddress': '12:22', + 'pins': '**REDACTED**', + 'pubsubChannel': '**REDACTED**', + 'skuNumber': 'AUG-MD01', + 'supportsEntryCodes': True, + 'timeZone': 'Pacific/Hawaii', + 'zWaveEnabled': False, + }), + }), + }) +# --- diff --git a/tests/components/yale/snapshots/test_lock.ambr b/tests/components/yale/snapshots/test_lock.ambr new 
file mode 100644 index 00000000000..b1a9f6a4d86 --- /dev/null +++ b/tests/components/yale/snapshots/test_lock.ambr @@ -0,0 +1,37 @@ +# serializer version: 1 +# name: test_lock_device_registry + DeviceRegistryEntrySnapshot({ + 'area_id': 'online_with_doorsense_name', + 'config_entries': , + 'configuration_url': 'https://account.aaecosystem.com', + 'connections': set({ + tuple( + 'bluetooth', + '12:22', + ), + }), + 'disabled_by': None, + 'entry_type': None, + 'hw_version': None, + 'id': , + 'identifiers': set({ + tuple( + 'yale', + 'online_with_doorsense', + ), + }), + 'is_new': False, + 'labels': set({ + }), + 'manufacturer': 'Yale Home Inc.', + 'model': 'AUG-MD01', + 'model_id': None, + 'name': 'online_with_doorsense Name', + 'name_by_user': None, + 'primary_config_entry': , + 'serial_number': None, + 'suggested_area': 'online_with_doorsense Name', + 'sw_version': 'undefined-4.3.0-1.8.14', + 'via_device_id': None, + }) +# --- diff --git a/tests/components/yale/snapshots/test_sensor.ambr b/tests/components/yale/snapshots/test_sensor.ambr new file mode 100644 index 00000000000..a425cfa90de --- /dev/null +++ b/tests/components/yale/snapshots/test_sensor.ambr @@ -0,0 +1,95 @@ +# serializer version: 1 +# name: test_lock_operator_autorelock + ReadOnlyDict({ + 'autorelock': True, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'autorelock', + 'remote': False, + 'tag': False, + }) +# --- +# name: test_lock_operator_keypad + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': True, + 'manual': False, + 'method': 'keypad', + 'remote': False, + 'tag': False, + }) +# --- +# name: test_lock_operator_manual + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': True, + 'method': 'manual', + 'remote': False, + 'tag': False, + }) +# --- +# name: test_lock_operator_remote + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'remote', + 'remote': True, + 'tag': False, + }) +# --- +# name: test_restored_state + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'autorelock': False, + 'entity_picture': 'image.png', + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'tag', + 'remote': False, + 'tag': True, + }), + 'context': , + 'entity_id': 'sensor.online_with_doorsense_name_operator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Tag Unlock', + }) +# --- +# name: test_unlock_operator_manual + StateSnapshot({ + 'attributes': ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': True, + 'method': 'manual', + 'remote': False, + 'tag': False, + }), + 'context': , + 'entity_id': 'sensor.online_with_doorsense_name_operator', + 'last_changed': , + 'last_reported': , + 'last_updated': , + 'state': 'Your favorite elven princess', + }) +# --- +# name: test_unlock_operator_tag + ReadOnlyDict({ + 'autorelock': False, + 'friendly_name': 'online_with_doorsense Name Operator', + 'keypad': False, + 'manual': False, + 'method': 'tag', + 'remote': False, + 'tag': True, + }) +# --- diff --git a/tests/components/yale/test_binary_sensor.py b/tests/components/yale/test_binary_sensor.py new file mode 100644 index 00000000000..811c845e359 --- /dev/null +++ 
b/tests/components/yale/test_binary_sensor.py @@ -0,0 +1,308 @@ +"""The binary_sensor tests for the yale platform.""" + +import datetime + +from freezegun.api import FrozenDateTimeFactory +from syrupy import SnapshotAssertion + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_UNLOCK, + STATE_OFF, + STATE_ON, + STATE_UNAVAILABLE, +) +from homeassistant.core import HomeAssistant +from homeassistant.helpers import device_registry as dr +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_doorsense_enabled_yale_lock_detail, + _mock_lock_from_fixture, +) + +from tests.common import async_fire_time_changed + + +async def test_doorsense(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + await _create_yale_with_devices(hass, [lock_one]) + states = hass.states + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF + ) + + +async def test_lock_bridge_offline(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that goes offline.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + activities = await _mock_activities_from_fixture( + hass, "get_activity.bridge_offline.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + states = hass.states + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state + == STATE_UNAVAILABLE + ) + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_yale_with_devices(hass, [doorbell_one]) + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF + ) + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF + ) + + +async def test_create_doorbell_offline(hass: HomeAssistant) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + states = hass.states + assert states.get("binary_sensor.tmt100_name_motion").state == STATE_UNAVAILABLE + assert states.get("binary_sensor.tmt100_name_connectivity").state == STATE_OFF + assert ( + states.get("binary_sensor.tmt100_name_doorbell_ding").state == STATE_UNAVAILABLE + ) + + +async def test_create_doorbell_with_motion( + 
hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + activities = await _mock_activities_from_fixture( + hass, "get_activity.doorbell_motion.json" + ) + await _create_yale_with_devices(hass, [doorbell_one], activities=activities) + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON + assert states.get("binary_sensor.k98gidt45gul_name_connectivity").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + + +async def test_doorbell_update_via_socketio( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test creation of a doorbell that can be updated via socketio.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + _, socketio = await _create_yale_with_devices(hass, [doorbell_one]) + assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" + states = hass.states + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_OFF + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + + listener = list(socketio._listeners)[0] + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "imagecapture", + "data": { + "result": { + "created_at": "2021-03-16T01:07:08.817Z", + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/zip/images/zip.jpeg" + ), + }, + }, + }, + ) + + await hass.async_block_till_done() + + assert states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_ON + + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "doorbell_motion_detected", + "data": { + "event": "doorbell_motion_detected", + "image": { + "height": 640, + "width": 480, + "format": "jpg", + "created_at": "2021-03-16T02:36:26.886Z", + "bytes": 14061, + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" + ), + "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", + "etag": "09e839331c4ea59eef28081f2caa0e90", + }, + "doorbellName": "Front Door", + "callID": None, + "origin": "mars-api", + "mutableContent": True, + }, + }, + ) + + await hass.async_block_till_done() + + assert states.get("binary_sensor.k98gidt45gul_name_motion").state == STATE_ON + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + states.get("binary_sensor.k98gidt45gul_name_image_capture").state == STATE_OFF + ) + + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "buttonpush", + }, + ) + + await hass.async_block_till_done() + + assert states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_ON + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + assert ( + states.get("binary_sensor.k98gidt45gul_name_doorbell_ding").state == STATE_OFF + ) + + +async def test_doorbell_device_registry( + hass: HomeAssistant, + device_registry: dr.DeviceRegistry, + snapshot: SnapshotAssertion, +) -> None: + """Test creation of a lock with doorsense and bridge ands up in the registry.""" + doorbell_one = await 
_mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + reg_device = device_registry.async_get_device(identifiers={("yale", "tmt100")}) + assert reg_device == snapshot + + +async def test_door_sense_update_via_socketio(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + assert lock_one.pubsub_channel == "pubsub" + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + config_entry, socketio = await _create_yale_with_devices( + hass, [lock_one], activities=activities + ) + states = hass.states + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + listener = list(socketio._listeners)[0] + listener( + lock_one.device_id, + dt_util.utcnow(), + {"status": "kAugLockState_Unlocking", "doorState": "closed"}, + ) + + await hass.async_block_till_done() + + assert ( + states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_OFF + ) + + listener( + lock_one.device_id, + dt_util.utcnow(), + {"status": "kAugLockState_Locking", "doorState": "open"}, + ) + + await hass.async_block_till_done() + + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + socketio.connected = True + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + # Ensure socketio status is always preserved + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + listener( + lock_one.device_id, + dt_util.utcnow(), + {"status": "kAugLockState_Unlocking", "doorState": "open"}, + ) + + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) + await hass.async_block_till_done() + assert states.get("binary_sensor.online_with_doorsense_name_door").state == STATE_ON + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + +async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: + """Test creation of a lock with a doorbell.""" + lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") + await _create_yale_with_devices(hass, [lock_one]) + states = hass.states + assert ( + states.get( + "binary_sensor.a6697750d607098bae8d6baa11ef8063_name_doorbell_ding" + ).state + == STATE_OFF + ) diff --git a/tests/components/yale/test_button.py b/tests/components/yale/test_button.py new file mode 100644 index 00000000000..92d3ecef859 --- /dev/null +++ b/tests/components/yale/test_button.py @@ -0,0 +1,23 @@ +"""The button tests for the yale platform.""" + +from homeassistant.components.button import DOMAIN as BUTTON_DOMAIN, SERVICE_PRESS +from homeassistant.const import ATTR_ENTITY_ID +from homeassistant.core import HomeAssistant + +from .mocks import _create_yale_api_with_devices, _mock_lock_from_fixture + + +async def 
test_wake_lock(hass: HomeAssistant) -> None: + """Test creation of a lock and wake it.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + _, api_instance, _ = await _create_yale_api_with_devices(hass, [lock_one]) + entity_id = "button.online_with_doorsense_name_wake" + binary_sensor_online_with_doorsense_name = hass.states.get(entity_id) + assert binary_sensor_online_with_doorsense_name is not None + api_instance.async_status_async.reset_mock() + await hass.services.async_call( + BUTTON_DOMAIN, SERVICE_PRESS, {ATTR_ENTITY_ID: entity_id}, blocking=True + ) + api_instance.async_status_async.assert_called_once() diff --git a/tests/components/yale/test_camera.py b/tests/components/yale/test_camera.py new file mode 100644 index 00000000000..502945b19c1 --- /dev/null +++ b/tests/components/yale/test_camera.py @@ -0,0 +1,93 @@ +"""The camera tests for the yale platform.""" + +from http import HTTPStatus +from unittest.mock import patch + +from yalexs.const import Brand +from yalexs.doorbell import ContentTokenExpired + +from homeassistant.const import STATE_IDLE +from homeassistant.core import HomeAssistant + +from .mocks import _create_yale_with_devices, _mock_doorbell_from_fixture + +from tests.typing import ClientSessionGenerator + + +async def test_create_doorbell( + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator +) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + with patch.object( + doorbell_one, "async_get_doorbell_image", create=False, return_value="image" + ): + await _create_yale_with_devices(hass, [doorbell_one], brand=Brand.YALE_GLOBAL) + + camera_k98gidt45gul_name_camera = hass.states.get( + "camera.k98gidt45gul_name_camera" + ) + assert camera_k98gidt45gul_name_camera.state == STATE_IDLE + + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] + + client = await hass_client_no_auth() + resp = await client.get(url) + assert resp.status == HTTPStatus.OK + body = await resp.text() + assert body == "image" + + +async def test_doorbell_refresh_content_token_recover( + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator +) -> None: + """Test camera image content token expired.""" + doorbell_two = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + with patch.object( + doorbell_two, + "async_get_doorbell_image", + create=False, + side_effect=[ContentTokenExpired, "image"], + ): + await _create_yale_with_devices( + hass, + [doorbell_two], + brand=Brand.YALE_GLOBAL, + ) + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] + + client = await hass_client_no_auth() + resp = await client.get(url) + assert resp.status == HTTPStatus.OK + body = await resp.text() + assert body == "image" + + +async def test_doorbell_refresh_content_token_fail( + hass: HomeAssistant, hass_client_no_auth: ClientSessionGenerator +) -> None: + """Test camera image content token expired.""" + doorbell_two = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + with patch.object( + doorbell_two, + "async_get_doorbell_image", + create=False, + side_effect=ContentTokenExpired, + ): + await _create_yale_with_devices( + hass, + [doorbell_two], + brand=Brand.YALE_GLOBAL, + ) + url = hass.states.get("camera.k98gidt45gul_name_camera").attributes[ + "entity_picture" + ] + + client = await hass_client_no_auth() + resp = await client.get(url) + assert resp.status == 
HTTPStatus.INTERNAL_SERVER_ERROR diff --git a/tests/components/yale/test_config_flow.py b/tests/components/yale/test_config_flow.py new file mode 100644 index 00000000000..163f8240553 --- /dev/null +++ b/tests/components/yale/test_config_flow.py @@ -0,0 +1,275 @@ +"""Test the yale config flow.""" + +from collections.abc import Generator +from unittest.mock import ANY, Mock, patch + +import pytest + +from homeassistant.components.yale.application_credentials import ( + OAUTH2_AUTHORIZE, + OAUTH2_TOKEN, +) +from homeassistant.components.yale.const import DOMAIN +from homeassistant.config_entries import SOURCE_USER +from homeassistant.core import HomeAssistant +from homeassistant.data_entry_flow import FlowResultType +from homeassistant.helpers import config_entry_oauth2_flow + +from .mocks import USER_ID + +from tests.common import MockConfigEntry +from tests.test_util.aiohttp import AiohttpClientMocker +from tests.typing import ClientSessionGenerator + +CLIENT_ID = "1" + + +@pytest.fixture +def mock_setup_entry() -> Generator[Mock, None, None]: + """Patch setup entry.""" + with patch( + "homeassistant.components.yale.async_setup_entry", return_value=True + ) as mock_setup_entry: + yield mock_setup_entry + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + jwt: str, + mock_setup_entry: Mock, +) -> None: + """Check full flow.""" + result = await hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": jwt, + "scope": "any", + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "user_id": "mock-user-id", + "expires_at": 1697753347, + }, + ) + + result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + assert len(mock_setup_entry.mock_calls) == 1 + entry = hass.config_entries.async_entries(DOMAIN)[0] + assert entry.unique_id == USER_ID + + assert result2["type"] is FlowResultType.CREATE_ENTRY + assert result2["result"].unique_id == USER_ID + assert entry.data == { + "auth_implementation": "yale", + "token": { + "access_token": jwt, + "expires_at": ANY, + "expires_in": ANY, + "refresh_token": "mock-refresh-token", + "scope": "any", + "user_id": "mock-user-id", + }, + } + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_full_flow_already_exists( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + jwt: str, + mock_setup_entry: Mock, + mock_config_entry: MockConfigEntry, +) -> None: + """Check full flow for a user that already exists.""" + + mock_config_entry.add_to_hass(hass) + + result = await 
hass.config_entries.flow.async_init( + DOMAIN, context={"source": SOURCE_USER} + ) + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + + assert result["url"] == ( + f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}" + "&redirect_uri=https://example.com/auth/external/callback" + f"&state={state}" + ) + + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.clear_requests() + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": jwt, + "scope": "any", + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "user_id": "mock-user-id", + "expires_at": 1697753347, + }, + ) + + result2 = await hass.config_entries.flow.async_configure(result["flow_id"]) + assert result2["type"] is FlowResultType.ABORT + assert result2["reason"] == "already_configured" + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_config_entry: MockConfigEntry, + reauth_jwt: str, + mock_setup_entry: Mock, +) -> None: + """Test the reauthentication case updates the existing config entry.""" + + mock_config_entry.add_to_hass(hass) + + mock_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "auth" + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": reauth_jwt, + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "user_id": USER_ID, + "token_type": "Bearer", + "expires_at": 1697753347, + }, + ) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_successful" + + assert mock_config_entry.unique_id == USER_ID + assert "token" in mock_config_entry.data + # Verify access token is refreshed + assert mock_config_entry.data["token"]["access_token"] == reauth_jwt + + +@pytest.mark.usefixtures("client_credentials") +@pytest.mark.usefixtures("current_request_with_host") +async def test_reauth_wrong_account( + hass: HomeAssistant, + hass_client_no_auth: ClientSessionGenerator, + aioclient_mock: AiohttpClientMocker, + mock_config_entry: MockConfigEntry, + reauth_jwt_wrong_account: str, + jwt: str, + mock_setup_entry: Mock, +) -> None: + """Test the reauthentication aborts, if user tries to reauthenticate with another account.""" + assert mock_config_entry.data["token"]["access_token"] == jwt + + mock_config_entry.add_to_hass(hass) + + mock_config_entry.async_start_reauth(hass) + await hass.async_block_till_done() + + flows = hass.config_entries.flow.async_progress() + 
assert len(flows) == 1 + result = flows[0] + assert result["step_id"] == "auth" + + state = config_entry_oauth2_flow._encode_jwt( + hass, + { + "flow_id": result["flow_id"], + "redirect_uri": "https://example.com/auth/external/callback", + }, + ) + client = await hass_client_no_auth() + resp = await client.get(f"/auth/external/callback?code=abcd&state={state}") + assert resp.status == 200 + assert resp.headers["content-type"] == "text/html; charset=utf-8" + + aioclient_mock.post( + OAUTH2_TOKEN, + json={ + "access_token": reauth_jwt_wrong_account, + "expires_in": 86399, + "refresh_token": "mock-refresh-token", + "token_type": "Bearer", + "expires_at": 1697753347, + }, + ) + + result = await hass.config_entries.flow.async_configure(result["flow_id"]) + await hass.async_block_till_done() + + assert len(hass.config_entries.async_entries(DOMAIN)) == 1 + + assert result["type"] is FlowResultType.ABORT + assert result["reason"] == "reauth_invalid_user" + + assert mock_config_entry.unique_id == USER_ID + assert "token" in mock_config_entry.data + # Verify access token is like before + assert mock_config_entry.data["token"]["access_token"] == jwt diff --git a/tests/components/yale/test_diagnostics.py b/tests/components/yale/test_diagnostics.py new file mode 100644 index 00000000000..e5fd6b1c1a7 --- /dev/null +++ b/tests/components/yale/test_diagnostics.py @@ -0,0 +1,31 @@ +"""Test yale diagnostics.""" + +from syrupy import SnapshotAssertion + +from homeassistant.core import HomeAssistant + +from .mocks import ( + _create_yale_api_with_devices, + _mock_doorbell_from_fixture, + _mock_lock_from_fixture, +) + +from tests.components.diagnostics import get_diagnostics_for_config_entry +from tests.typing import ClientSessionGenerator + + +async def test_diagnostics( + hass: HomeAssistant, + hass_client: ClientSessionGenerator, + snapshot: SnapshotAssertion, +) -> None: + """Test generating diagnostics for a config entry.""" + lock_one = await _mock_lock_from_fixture( + hass, "get_lock.online_with_doorsense.json" + ) + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + entry, _, _ = await _create_yale_api_with_devices(hass, [lock_one, doorbell_one]) + diag = await get_diagnostics_for_config_entry(hass, hass_client, entry) + + assert diag == snapshot diff --git a/tests/components/yale/test_event.py b/tests/components/yale/test_event.py new file mode 100644 index 00000000000..7aeb9d8f12b --- /dev/null +++ b/tests/components/yale/test_event.py @@ -0,0 +1,162 @@ +"""The event tests for the yale.""" + +from freezegun.api import FrozenDateTimeFactory + +from homeassistant.const import STATE_UNAVAILABLE, STATE_UNKNOWN +from homeassistant.core import HomeAssistant +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_lock_from_fixture, +) + +from tests.common import async_fire_time_changed + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + +async def test_create_doorbell_offline(hass: 
HomeAssistant) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + motion_state = hass.states.get("event.tmt100_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNAVAILABLE + doorbell_state = hass.states.get("event.tmt100_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNAVAILABLE + + +async def test_create_doorbell_with_motion( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + activities = await _mock_activities_from_fixture( + hass, "get_activity.doorbell_motion.json" + ) + await _create_yale_with_devices(hass, [doorbell_one], activities=activities) + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state.state == isotime + + +async def test_doorbell_update_via_socketio( + hass: HomeAssistant, freezer: FrozenDateTimeFactory +) -> None: + """Test creation of a doorbell that can be updated via socketio.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + + _, socketio = await _create_yale_with_devices(hass, [doorbell_one]) + assert doorbell_one.pubsub_channel == "7c7a6672-59c8-3333-ffff-dcd98705cccc" + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state == STATE_UNKNOWN + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN + + listener = list(socketio._listeners)[0] + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "doorbell_motion_detected", + "data": { + "event": "doorbell_motion_detected", + "image": { + "height": 640, + "width": 480, + "format": "jpg", + "created_at": "2021-03-16T02:36:26.886Z", + "bytes": 14061, + "secure_url": ( + "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg" + ), + "url": "https://dyu7azbnaoi74.cloudfront.net/images/1f8.jpeg", + "etag": "09e839331c4ea59eef28081f2caa0e90", + }, + "doorbellName": "Front Door", + "callID": None, + "origin": "mars-api", + "mutableContent": True, + }, + }, + ) + + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + isotime = motion_state.state + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + motion_state = hass.states.get("event.k98gidt45gul_name_motion") + assert motion_state is not None + assert motion_state.state != STATE_UNKNOWN + + listener( + doorbell_one.device_id, + dt_util.utcnow(), + { + "status": "buttonpush", + }, + ) + + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != STATE_UNKNOWN 
+ isotime = motion_state.state + + freezer.tick(40) + async_fire_time_changed(hass) + await hass.async_block_till_done() + + doorbell_state = hass.states.get("event.k98gidt45gul_name_doorbell") + assert doorbell_state is not None + assert doorbell_state.state != STATE_UNKNOWN + assert motion_state.state == isotime + + +async def test_create_lock_with_doorbell(hass: HomeAssistant) -> None: + """Test creation of a lock with a doorbell.""" + lock_one = await _mock_lock_from_fixture(hass, "lock_with_doorbell.online.json") + await _create_yale_with_devices(hass, [lock_one]) + + doorbell_state = hass.states.get( + "event.a6697750d607098bae8d6baa11ef8063_name_doorbell" + ) + assert doorbell_state is not None + assert doorbell_state.state == STATE_UNKNOWN diff --git a/tests/components/yale/test_init.py b/tests/components/yale/test_init.py new file mode 100644 index 00000000000..4f0a853710c --- /dev/null +++ b/tests/components/yale/test_init.py @@ -0,0 +1,237 @@ +"""The tests for the yale platform.""" + +from unittest.mock import Mock + +from aiohttp import ClientResponseError +import pytest +from yalexs.exceptions import InvalidAuth, YaleApiError + +from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN +from homeassistant.components.yale.const import DOMAIN +from homeassistant.config_entries import ConfigEntryState +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_OPEN, + SERVICE_UNLOCK, + STATE_LOCKED, + STATE_ON, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr, entity_registry as er +from homeassistant.setup import async_setup_component + +from .mocks import ( + _create_yale_with_devices, + _mock_doorsense_enabled_yale_lock_detail, + _mock_doorsense_missing_yale_lock_detail, + _mock_inoperative_yale_lock_detail, + _mock_lock_with_offline_key, + _mock_operative_yale_lock_detail, +) + +from tests.typing import WebSocketGenerator + + +async def test_yale_api_is_failing(hass: HomeAssistant) -> None: + """Config entry state is SETUP_RETRY when yale api is failing.""" + + config_entry, socketio = await _create_yale_with_devices( + hass, + authenticate_side_effect=YaleApiError( + "offline", ClientResponseError(None, None, status=500) + ), + ) + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_yale_is_offline(hass: HomeAssistant) -> None: + """Config entry state is SETUP_RETRY when yale is offline.""" + + config_entry, socketio = await _create_yale_with_devices( + hass, authenticate_side_effect=TimeoutError + ) + + assert config_entry.state is ConfigEntryState.SETUP_RETRY + + +async def test_yale_late_auth_failure(hass: HomeAssistant) -> None: + """Test we can detect a late auth failure.""" + config_entry, socketio = await _create_yale_with_devices( + hass, + authenticate_side_effect=InvalidAuth( + "authfailed", ClientResponseError(None, None, status=401) + ), + ) + + assert config_entry.state is ConfigEntryState.SETUP_ERROR + flows = hass.config_entries.flow.async_progress() + + assert flows[0]["step_id"] == "pick_implementation" + + +async def test_unlock_throws_yale_api_http_error(hass: HomeAssistant) -> None: + """Test unlock throws correct error on http error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + aiohttp_client_response_exception = ClientResponseError(None, None, status=400) + + def _unlock_return_activities_side_effect(access_token, device_id): + raise YaleApiError( + "This should 
bubble up as its user consumable", + aiohttp_client_response_exception, + ) + + await _create_yale_with_devices( + hass, + [mocked_lock_detail], + api_call_side_effects={ + "unlock_return_activities": _unlock_return_activities_side_effect + }, + ) + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + +async def test_lock_throws_yale_api_http_error(hass: HomeAssistant) -> None: + """Test lock throws correct error on http error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + aiohttp_client_response_exception = ClientResponseError(None, None, status=400) + + def _lock_return_activities_side_effect(access_token, device_id): + raise YaleApiError( + "This should bubble up as its user consumable", + aiohttp_client_response_exception, + ) + + await _create_yale_with_devices( + hass, + [mocked_lock_detail], + api_call_side_effects={ + "lock_return_activities": _lock_return_activities_side_effect + }, + ) + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + with pytest.raises( + HomeAssistantError, + match=( + "A6697750D607098BAE8D6BAA11EF8063 Name: This should bubble up as its user" + " consumable" + ), + ): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + + +async def test_open_throws_hass_service_not_supported_error( + hass: HomeAssistant, +) -> None: + """Test open throws correct error on entity does not support this service error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [mocked_lock_detail]) + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + with pytest.raises(HomeAssistantError): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + + +async def test_inoperative_locks_are_filtered_out(hass: HomeAssistant) -> None: + """Ensure inoperative locks do not get setup.""" + yale_operative_lock = await _mock_operative_yale_lock_detail(hass) + yale_inoperative_lock = await _mock_inoperative_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [yale_operative_lock, yale_inoperative_lock]) + + lock_abc_name = hass.states.get("lock.abc_name") + assert lock_abc_name is None + lock_a6697750d607098bae8d6baa11ef8063_name = hass.states.get( + "lock.a6697750d607098bae8d6baa11ef8063_name" + ) + assert lock_a6697750d607098bae8d6baa11ef8063_name.state == STATE_LOCKED + + +async def test_lock_has_doorsense(hass: HomeAssistant) -> None: + """Check to see if a lock has doorsense.""" + doorsenselock = await _mock_doorsense_enabled_yale_lock_detail(hass) + nodoorsenselock = await _mock_doorsense_missing_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [doorsenselock, nodoorsenselock]) + + binary_sensor_online_with_doorsense_name_open = hass.states.get( + "binary_sensor.online_with_doorsense_name_door" + ) + assert binary_sensor_online_with_doorsense_name_open.state == STATE_ON + binary_sensor_missing_doorsense_id_name_open = hass.states.get( + "binary_sensor.missing_with_doorsense_name_door" + ) + assert binary_sensor_missing_doorsense_id_name_open is None + + +async def test_load_unload(hass: HomeAssistant) -> None: + """Config entry can be unloaded.""" + + yale_operative_lock = await _mock_operative_yale_lock_detail(hass) + 
yale_inoperative_lock = await _mock_inoperative_yale_lock_detail(hass) + config_entry, socketio = await _create_yale_with_devices( + hass, [yale_operative_lock, yale_inoperative_lock] + ) + + assert config_entry.state is ConfigEntryState.LOADED + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.NOT_LOADED + + +async def test_load_triggers_ble_discovery( + hass: HomeAssistant, mock_discovery: Mock +) -> None: + """Test that loading a lock that supports offline ble operation passes the keys to yalexe_ble.""" + + yale_lock_with_key = await _mock_lock_with_offline_key(hass) + yale_lock_without_key = await _mock_operative_yale_lock_detail(hass) + + config_entry, socketio = await _create_yale_with_devices( + hass, [yale_lock_with_key, yale_lock_without_key] + ) + await hass.async_block_till_done() + assert config_entry.state is ConfigEntryState.LOADED + + assert len(mock_discovery.mock_calls) == 1 + assert mock_discovery.mock_calls[0].kwargs["data"] == { + "name": "Front Door Lock", + "address": None, + "serial": "X2FSW05DGA", + "key": "kkk01d4300c1dcxxx1c330f794941111", + "slot": 1, + } + + +async def test_device_remove_devices( + hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, + device_registry: dr.DeviceRegistry, + entity_registry: er.EntityRegistry, +) -> None: + """Test we can only remove a device that no longer exists.""" + assert await async_setup_component(hass, "config", {}) + yale_operative_lock = await _mock_operative_yale_lock_detail(hass) + config_entry, socketio = await _create_yale_with_devices( + hass, [yale_operative_lock] + ) + entity = entity_registry.entities["lock.a6697750d607098bae8d6baa11ef8063_name"] + + device_entry = device_registry.async_get(entity.device_id) + client = await hass_ws_client(hass) + response = await client.remove_device(device_entry.id, config_entry.entry_id) + assert not response["success"] + + dead_device_entry = device_registry.async_get_or_create( + config_entry_id=config_entry.entry_id, + identifiers={(DOMAIN, "remove-device-id")}, + ) + response = await client.remove_device(dead_device_entry.id, config_entry.entry_id) + assert response["success"] diff --git a/tests/components/yale/test_lock.py b/tests/components/yale/test_lock.py new file mode 100644 index 00000000000..2bbb7408953 --- /dev/null +++ b/tests/components/yale/test_lock.py @@ -0,0 +1,432 @@ +"""The lock tests for the yale platform.""" + +import datetime + +from aiohttp import ClientResponseError +from freezegun.api import FrozenDateTimeFactory +import pytest +from syrupy import SnapshotAssertion +from yalexs.manager.activity import INITIAL_LOCK_RESYNC_TIME + +from homeassistant.components.lock import ( + DOMAIN as LOCK_DOMAIN, + STATE_JAMMED, + STATE_LOCKING, + STATE_UNLOCKING, +) +from homeassistant.const import ( + ATTR_ENTITY_ID, + SERVICE_LOCK, + SERVICE_OPEN, + SERVICE_UNLOCK, + STATE_LOCKED, + STATE_UNAVAILABLE, + STATE_UNKNOWN, + STATE_UNLOCKED, +) +from homeassistant.core import HomeAssistant +from homeassistant.exceptions import HomeAssistantError +from homeassistant.helpers import device_registry as dr, entity_registry as er +import homeassistant.util.dt as dt_util + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorsense_enabled_yale_lock_detail, + _mock_lock_from_fixture, + _mock_lock_with_unlatch, + _mock_operative_yale_lock_detail, +) + +from tests.common import async_fire_time_changed + + +async def 
test_lock_device_registry( + hass: HomeAssistant, device_registry: dr.DeviceRegistry, snapshot: SnapshotAssertion +) -> None: + """Test creation of a lock with doorsense and bridge ends up in the registry.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [lock_one]) + + reg_device = device_registry.async_get_device( + identifiers={("yale", "online_with_doorsense")} + ) + assert reg_device == snapshot + + +async def test_lock_changed_by(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED + assert lock_state.attributes["changed_by"] == "Your favorite elven princess" + + +async def test_state_locking(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that is locking.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.locking.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_LOCKING + + +async def test_state_unlocking(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that is unlocking.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.unlocking.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_online_with_doorsense_name = hass.states.get("lock.online_with_doorsense_name") + + assert lock_online_with_doorsense_name.state == STATE_UNLOCKING + + +async def test_state_jammed(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that is jammed.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.jammed.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + assert hass.states.get("lock.online_with_doorsense_name").state == STATE_JAMMED + + +async def test_one_lock_operation( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [lock_one]) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + + assert lock_state.state == STATE_LOCKED + + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED + + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state ==
STATE_LOCKED + + # No activity means it will be unavailable until the activity feed has data + assert entity_registry.async_get("sensor.online_with_doorsense_name_operator") + operator_state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert operator_state.state == STATE_UNKNOWN + + +async def test_open_lock_operation(hass: HomeAssistant) -> None: + """Test open lock operation using the open service.""" + lock_with_unlatch = await _mock_lock_with_unlatch(hass) + await _create_yale_with_devices(hass, [lock_with_unlatch]) + + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_LOCKED + + data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_UNLOCKED + + +async def test_open_lock_operation_socketio_connected( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test open lock operation using the open service when socketio is connected.""" + lock_with_unlatch = await _mock_lock_with_unlatch(hass) + assert lock_with_unlatch.pubsub_channel == "pubsub" + + _, socketio = await _create_yale_with_devices(hass, [lock_with_unlatch]) + socketio.connected = True + + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_LOCKED + + data = {ATTR_ENTITY_ID: "lock.online_with_unlatch_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) + + listener = list(socketio._listeners)[0] + listener( + lock_with_unlatch.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Unlocked", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert hass.states.get("lock.online_with_unlatch_name").state == STATE_UNLOCKED + await hass.async_block_till_done() + + +async def test_one_lock_operation_socketio_connected( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test lock and unlock operations are async when socketio is connected.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + assert lock_one.pubsub_channel == "pubsub" + states = hass.states + + _, socketio = await _create_yale_with_devices(hass, [lock_one]) + socketio.connected = True + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + listener = list(socketio._listeners)[0] + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=1), + { + "status": "kAugLockState_Unlocked", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + lock_state = states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_UNLOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + await hass.services.async_call(LOCK_DOMAIN, SERVICE_LOCK, data, blocking=True) + + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Locked", + }, + ) + + await 
hass.async_block_till_done() + await hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED + + # No activity means it will be unavailable until the activity feed has data + assert entity_registry.async_get("sensor.online_with_doorsense_name_operator") + assert ( + states.get("sensor.online_with_doorsense_name_operator").state == STATE_UNKNOWN + ) + + freezer.tick(INITIAL_LOCK_RESYNC_TIME) + + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Unlocked", + }, + ) + + await hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKED + + +async def test_lock_jammed(hass: HomeAssistant) -> None: + """Test lock gets jammed on unlock.""" + + def _unlock_return_activities_side_effect(access_token, device_id): + raise ClientResponseError(None, None, status=531) + + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices( + hass, + [lock_one], + api_call_side_effects={ + "unlock_return_activities": _unlock_return_activities_side_effect + }, + ) + + states = hass.states + lock_state = states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + assert states.get("lock.online_with_doorsense_name").state == STATE_JAMMED + + +async def test_lock_throws_exception_on_unknown_status_code( + hass: HomeAssistant, +) -> None: + """Test lock throws exception.""" + + def _unlock_return_activities_side_effect(access_token, device_id): + raise ClientResponseError(None, None, status=500) + + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + await _create_yale_with_devices( + hass, + [lock_one], + api_call_side_effects={ + "unlock_return_activities": _unlock_return_activities_side_effect + }, + ) + + lock_state = hass.states.get("lock.online_with_doorsense_name") + assert lock_state.state == STATE_LOCKED + assert lock_state.attributes["battery_level"] == 92 + assert lock_state.attributes["friendly_name"] == "online_with_doorsense Name" + + data = {ATTR_ENTITY_ID: "lock.online_with_doorsense_name"} + with pytest.raises(ClientResponseError): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_UNLOCK, data, blocking=True) + + +async def test_one_lock_unknown_state(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge.""" + lock_one = await _mock_lock_from_fixture( + hass, + "get_lock.online.unknown_state.json", + ) + await _create_yale_with_devices(hass, [lock_one]) + + assert hass.states.get("lock.brokenid_name").state == STATE_UNKNOWN + + +async def test_lock_bridge_offline(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that goes offline.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.bridge_offline.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + states = hass.states + assert states.get("lock.online_with_doorsense_name").state == STATE_UNAVAILABLE + + +async def test_lock_bridge_online(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that goes 
back online.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.bridge_online.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + states = hass.states + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED + + +async def test_lock_update_via_socketio(hass: HomeAssistant) -> None: + """Test creation of a lock with doorsense and bridge that can be updated via socketio.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + assert lock_one.pubsub_channel == "pubsub" + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + config_entry, socketio = await _create_yale_with_devices( + hass, [lock_one], activities=activities + ) + socketio.connected = True + states = hass.states + + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKED + + listener = list(socketio._listeners)[0] + listener( + lock_one.device_id, + dt_util.utcnow(), + { + "status": "kAugLockState_Unlocking", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING + + listener( + lock_one.device_id, + dt_util.utcnow(), + { + "status": "kAugLockState_Locking", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING + + socketio.connected = True + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(seconds=30)) + await hass.async_block_till_done() + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING + + # Ensure socketio status is always preserved + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=2)) + await hass.async_block_till_done() + assert states.get("lock.online_with_doorsense_name").state == STATE_LOCKING + + listener( + lock_one.device_id, + dt_util.utcnow() + datetime.timedelta(seconds=2), + { + "status": "kAugLockState_Unlocking", + }, + ) + + await hass.async_block_till_done() + await hass.async_block_till_done() + + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING + + async_fire_time_changed(hass, dt_util.utcnow() + datetime.timedelta(hours=4)) + await hass.async_block_till_done() + assert states.get("lock.online_with_doorsense_name").state == STATE_UNLOCKING + + await hass.config_entries.async_unload(config_entry.entry_id) + await hass.async_block_till_done() + + +async def test_open_throws_hass_service_not_supported_error( + hass: HomeAssistant, +) -> None: + """Test open throws correct error on entity does not support this service error.""" + mocked_lock_detail = await _mock_operative_yale_lock_detail(hass) + await _create_yale_with_devices(hass, [mocked_lock_detail]) + data = {ATTR_ENTITY_ID: "lock.a6697750d607098bae8d6baa11ef8063_name"} + with pytest.raises(HomeAssistantError, match="does not support this service"): + await hass.services.async_call(LOCK_DOMAIN, SERVICE_OPEN, data, blocking=True) diff --git a/tests/components/yale/test_sensor.py b/tests/components/yale/test_sensor.py new file mode 100644 index 00000000000..5d724b4bb9d --- /dev/null +++ b/tests/components/yale/test_sensor.py @@ -0,0 +1,320 @@ +"""The sensor tests for
the yale platform.""" + +from typing import Any + +from syrupy import SnapshotAssertion + +from homeassistant import core as ha +from homeassistant.const import ( + ATTR_ENTITY_PICTURE, + ATTR_UNIT_OF_MEASUREMENT, + PERCENTAGE, + STATE_UNKNOWN, +) +from homeassistant.core import CoreState, HomeAssistant +from homeassistant.helpers import entity_registry as er + +from .mocks import ( + _create_yale_with_devices, + _mock_activities_from_fixture, + _mock_doorbell_from_fixture, + _mock_doorsense_enabled_yale_lock_detail, + _mock_lock_from_fixture, +) + +from tests.common import mock_restore_cache_with_extra_data + + +async def test_create_doorbell(hass: HomeAssistant) -> None: + """Test creation of a doorbell.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + battery_state = hass.states.get("sensor.k98gidt45gul_name_battery") + assert battery_state.state == "96" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + + +async def test_create_doorbell_offline( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a doorbell that is offline.""" + doorbell_one = await _mock_doorbell_from_fixture(hass, "get_doorbell.offline.json") + await _create_yale_with_devices(hass, [doorbell_one]) + + battery_state = hass.states.get("sensor.tmt100_name_battery") + assert battery_state.state == "81" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + + entry = entity_registry.async_get("sensor.tmt100_name_battery") + assert entry + assert entry.unique_id == "tmt100_device_battery" + + +async def test_create_doorbell_hardwired(hass: HomeAssistant) -> None: + """Test creation of a doorbell that is hardwired without a battery.""" + doorbell_one = await _mock_doorbell_from_fixture( + hass, "get_doorbell.nobattery.json" + ) + await _create_yale_with_devices(hass, [doorbell_one]) + + sensor_tmt100_name_battery = hass.states.get("sensor.tmt100_name_battery") + assert sensor_tmt100_name_battery is None + + +async def test_create_lock_with_linked_keypad( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a lock with a linked keypad that both have a battery.""" + lock_one = await _mock_lock_from_fixture(hass, "get_lock.doorsense_init.json") + await _create_yale_with_devices(hass, [lock_one]) + + battery_state = hass.states.get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + + entry = entity_registry.async_get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert entry + assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" + + keypad_battery_state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert keypad_battery_state.state == "62" + assert keypad_battery_state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") + assert entry + assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" + + +async def test_create_lock_with_low_battery_linked_keypad( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test creation of a lock with a linked keypad that both have a battery.""" + lock_one = await _mock_lock_from_fixture(hass, "get_lock.low_keypad_battery.json") + await _create_yale_with_devices(hass, [lock_one]) + + battery_state = 
hass.states.get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert battery_state.state == "88" + assert battery_state.attributes["unit_of_measurement"] == PERCENTAGE + entry = entity_registry.async_get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_battery" + ) + assert entry + assert entry.unique_id == "A6697750D607098BAE8D6BAA11EF8063_device_battery" + + state = hass.states.get("sensor.front_door_lock_keypad_battery") + assert state.state == "10" + assert state.attributes[ATTR_UNIT_OF_MEASUREMENT] == PERCENTAGE + entry = entity_registry.async_get("sensor.front_door_lock_keypad_battery") + assert entry + assert entry.unique_id == "5bc65c24e6ef2a263e1450a8_linked_keypad_battery" + + # No activity means it will be unavailable until someone unlocks/locks it + lock_operator_sensor = entity_registry.async_get( + "sensor.a6697750d607098bae8d6baa11ef8063_name_operator" + ) + assert ( + lock_operator_sensor.unique_id + == "A6697750D607098BAE8D6BAA11EF8063_lock_operator" + ) + assert ( + hass.states.get("sensor.a6697750d607098bae8d6baa11ef8063_name_operator").state + == STATE_UNKNOWN + ) + + +async def test_lock_operator_bluetooth( + hass: HomeAssistant, entity_registry: er.EntityRegistry +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_bluetooth.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes["manual"] is False + assert state.attributes["tag"] is False + assert state.attributes["remote"] is False + assert state.attributes["keypad"] is False + assert state.attributes["autorelock"] is False + assert state.attributes["method"] == "mobile" + + +async def test_lock_operator_keypad( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_keypad.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes == snapshot + + +async def test_lock_operator_remote( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture(hass, "get_activity.lock.json") + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes == snapshot 
+ + +async def test_lock_operator_manual( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_manual.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes == snapshot + + +async def test_lock_operator_autorelock( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock with doorsense and bridge.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.lock_from_autorelock.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Auto Relock" + assert state.attributes == snapshot + + +async def test_unlock_operator_manual( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock manually.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.unlock_from_manual.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state == snapshot + + +async def test_unlock_operator_tag( + hass: HomeAssistant, entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion +) -> None: + """Test operation of a lock with a tag.""" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + activities = await _mock_activities_from_fixture( + hass, "get_activity.unlock_from_tag.json" + ) + await _create_yale_with_devices(hass, [lock_one], activities=activities) + + lock_operator_sensor = entity_registry.async_get( + "sensor.online_with_doorsense_name_operator" + ) + assert lock_operator_sensor + + state = hass.states.get("sensor.online_with_doorsense_name_operator") + assert state.state == "Your favorite elven princess" + assert state.attributes == snapshot + + +async def test_restored_state( + hass: HomeAssistant, hass_storage: dict[str, Any], snapshot: SnapshotAssertion +) -> None: + """Test restored state.""" + + entity_id = "sensor.online_with_doorsense_name_operator" + lock_one = await _mock_doorsense_enabled_yale_lock_detail(hass) + + fake_state = ha.State( + entity_id, + state="Tag Unlock", + attributes={ + "method": "tag", + "manual": False, + "remote": False, + "keypad": False, + "tag": True, + "autorelock": False, + ATTR_ENTITY_PICTURE: "image.png", + }, + ) + + # Home assistant is not running yet + hass.set_state(CoreState.not_running) + 
mock_restore_cache_with_extra_data( + hass, + [ + ( + fake_state, + {"native_value": "Tag Unlock", "native_unit_of_measurement": None}, + ) + ], + ) + + await _create_yale_with_devices(hass, [lock_one]) + + await hass.async_block_till_done() + + state = hass.states.get(entity_id) + assert state.state == "Tag Unlock" + assert state == snapshot diff --git a/tests/components/yale_smart_alarm/conftest.py b/tests/components/yale_smart_alarm/conftest.py index 9583df5faa6..6ac6dfc6871 100644 --- a/tests/components/yale_smart_alarm/conftest.py +++ b/tests/components/yale_smart_alarm/conftest.py @@ -7,6 +7,7 @@ from typing import Any from unittest.mock import Mock, patch import pytest +from yalesmartalarmclient import YaleSmartAlarmData from yalesmartalarmclient.const import YALE_STATE_ARM_FULL from homeassistant.components.yale_smart_alarm.const import DOMAIN, PLATFORMS @@ -33,7 +34,10 @@ async def patch_platform_constant() -> list[Platform]: @pytest.fixture async def load_config_entry( - hass: HomeAssistant, load_json: dict[str, Any], load_platforms: list[Platform] + hass: HomeAssistant, + get_data: YaleSmartAlarmData, + get_all_data: YaleSmartAlarmData, + load_platforms: list[Platform], ) -> tuple[MockConfigEntry, Mock]: """Set up the Yale Smart Living integration in Home Assistant.""" with patch("homeassistant.components.yale_smart_alarm.PLATFORMS", load_platforms): @@ -56,7 +60,8 @@ async def load_config_entry( client = mock_client_class.return_value client.auth = Mock() client.lock_api = Mock() - client.get_all.return_value = load_json + client.get_all.return_value = get_all_data + client.get_information.return_value = get_data client.get_armed_status.return_value = YALE_STATE_ARM_FULL await hass.config_entries.async_setup(config_entry.entry_id) await hass.async_block_till_done() @@ -64,10 +69,50 @@ async def load_config_entry( return (config_entry, client) -@pytest.fixture(name="load_json", scope="package") -def load_json_from_fixture() -> dict[str, Any]: +@pytest.fixture(name="loaded_fixture", scope="package") +def get_fixture_data() -> dict[str, Any]: """Load fixture with json data and return.""" data_fixture = load_fixture("get_all.json", "yale_smart_alarm") json_data: dict[str, Any] = json.loads(data_fixture) return json_data + + +@pytest.fixture(name="get_data", scope="package") +def get_update_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: + """Load update data and return.""" + + status = loaded_fixture["STATUS"] + cycle = loaded_fixture["CYCLE"] + online = loaded_fixture["ONLINE"] + panel_info = loaded_fixture["PANEL INFO"] + return YaleSmartAlarmData( + status=status, + cycle=cycle, + online=online, + panel_info=panel_info, + ) + + +@pytest.fixture(name="get_all_data", scope="package") +def get_diag_data(loaded_fixture: dict[str, Any]) -> YaleSmartAlarmData: + """Load all data and return.""" + + devices = loaded_fixture["DEVICES"] + mode = loaded_fixture["MODE"] + status = loaded_fixture["STATUS"] + cycle = loaded_fixture["CYCLE"] + online = loaded_fixture["ONLINE"] + history = loaded_fixture["HISTORY"] + panel_info = loaded_fixture["PANEL INFO"] + auth_check = loaded_fixture["AUTH CHECK"] + return YaleSmartAlarmData( + devices=devices, + mode=mode, + status=status, + cycle=cycle, + online=online, + history=history, + panel_info=panel_info, + auth_check=auth_check, + ) diff --git a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr index a5dfe4b50dd..d4bbd42aaeb 100644 --- 
a/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr +++ b/tests/components/yale_smart_alarm/snapshots/test_diagnostics.ambr @@ -1,7 +1,7 @@ # serializer version: 1 # name: test_diagnostics dict({ - 'AUTH CHECK': dict({ + 'auth_check': dict({ 'agent': False, 'dealer_group': 'yale', 'dealer_id': '605', @@ -16,7 +16,7 @@ 'user_id': '**REDACTED**', 'xml_version': '2', }), - 'CYCLE': dict({ + 'cycle': dict({ 'alarm_event_latest': None, 'capture_latest': None, 'device_status': list([ @@ -650,7 +650,7 @@ 'utc_event_time': None, }), }), - 'DEVICES': list([ + 'devices': list([ dict({ 'address': '**REDACTED**', 'area': '1', @@ -1249,7 +1249,7 @@ 'type_no': '40', }), ]), - 'HISTORY': list([ + 'history': list([ dict({ 'area': 1, 'cid': '18180701000', @@ -1391,14 +1391,14 @@ 'zone': 1, }), ]), - 'MODE': list([ + 'mode': list([ dict({ 'area': '1', 'mode': 'disarm', }), ]), - 'ONLINE': 'online', - 'PANEL INFO': dict({ + 'online': 'online', + 'panel_info': dict({ 'SMS_Balance': '50', 'contact': '', 'dealer_name': 'Poland', @@ -1416,7 +1416,7 @@ 'zb_version': '4.1.2.6.2', 'zw_version': '', }), - 'STATUS': dict({ + 'status': dict({ 'acfail': 'main.normal', 'battery': 'main.normal', 'gsm_rssi': '0', diff --git a/tests/components/yale_smart_alarm/test_config_flow.py b/tests/components/yale_smart_alarm/test_config_flow.py index 4ef201d2122..d5651503768 100644 --- a/tests/components/yale_smart_alarm/test_config_flow.py +++ b/tests/components/yale_smart_alarm/test_config_flow.py @@ -132,15 +132,7 @@ async def test_reauth_flow(hass: HomeAssistant) -> None: ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["step_id"] == "reauth_confirm" assert result["type"] is FlowResultType.FORM assert result["errors"] == {} @@ -202,15 +194,7 @@ async def test_reauth_flow_error( ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": entry.unique_id, - "entry_id": entry.entry_id, - }, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) with patch( "homeassistant.components.yale_smart_alarm.config_flow.YaleSmartAlarmClient", diff --git a/tests/components/yale_smart_alarm/test_coordinator.py b/tests/components/yale_smart_alarm/test_coordinator.py index 6f1125fcf65..41362f2318a 100644 --- a/tests/components/yale_smart_alarm/test_coordinator.py +++ b/tests/components/yale_smart_alarm/test_coordinator.py @@ -3,12 +3,15 @@ from __future__ import annotations from datetime import timedelta -from typing import Any from unittest.mock import Mock, patch import pytest -from yalesmartalarmclient.const import YALE_STATE_ARM_FULL -from yalesmartalarmclient.exceptions import AuthenticationError, UnknownError +from yalesmartalarmclient import ( + YALE_STATE_ARM_FULL, + AuthenticationError, + UnknownError, + YaleSmartAlarmData, +) from homeassistant.components.yale_smart_alarm.const import DOMAIN from homeassistant.config_entries import SOURCE_USER @@ -32,7 +35,7 @@ from tests.common import MockConfigEntry, async_fire_time_changed ) async def test_coordinator_setup_errors( hass: HomeAssistant, - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, p_error: Exception, ) -> None: """Test the Yale Smart Living coordinator with errors.""" @@ -64,7 +67,7 @@ async def 
test_coordinator_setup_errors( async def test_coordinator_setup_and_update_errors( hass: HomeAssistant, load_config_entry: tuple[MockConfigEntry, Mock], - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, ) -> None: """Test the Yale Smart Living coordinator with errors.""" @@ -74,51 +77,51 @@ async def test_coordinator_setup_and_update_errors( assert state.state == STATE_ALARM_ARMED_AWAY client.reset_mock() - client.get_all.side_effect = ConnectionError("Could not connect") + client.get_information.side_effect = ConnectionError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=1)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_all.side_effect = ConnectionError("Could not connect") + client.get_information.side_effect = ConnectionError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=2)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_all.side_effect = TimeoutError("Could not connect") + client.get_information.side_effect = TimeoutError("Could not connect") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=3)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_all.side_effect = UnknownError("info") + client.get_information.side_effect = UnknownError("info") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=4)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE client.reset_mock() - client.get_all.side_effect = None - client.get_all.return_value = load_json + client.get_information.side_effect = None + client.get_information.return_value = get_data client.get_armed_status.return_value = YALE_STATE_ARM_FULL async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=5)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_ALARM_ARMED_AWAY client.reset_mock() - client.get_all.side_effect = AuthenticationError("Can not authenticate") + client.get_information.side_effect = AuthenticationError("Can not authenticate") async_fire_time_changed(hass, dt_util.utcnow() + timedelta(minutes=6)) await hass.async_block_till_done(wait_background_tasks=True) - client.get_all.assert_called_once() + client.get_information.assert_called_once() state = hass.states.get("alarm_control_panel.yale_smart_alarm") assert state.state == STATE_UNAVAILABLE diff --git a/tests/components/yale_smart_alarm/test_lock.py b/tests/components/yale_smart_alarm/test_lock.py index 09ce8529084..7c67703924b 100644 --- 
a/tests/components/yale_smart_alarm/test_lock.py +++ b/tests/components/yale_smart_alarm/test_lock.py @@ -3,13 +3,11 @@ from __future__ import annotations from copy import deepcopy -from typing import Any from unittest.mock import Mock import pytest from syrupy.assertion import SnapshotAssertion -from yalesmartalarmclient.exceptions import UnknownError -from yalesmartalarmclient.lock import YaleDoorManAPI +from yalesmartalarmclient import UnknownError, YaleDoorManAPI, YaleSmartAlarmData from homeassistant.components.lock import DOMAIN as LOCK_DOMAIN from homeassistant.const import ( @@ -20,7 +18,7 @@ from homeassistant.const import ( Platform, ) from homeassistant.core import HomeAssistant -from homeassistant.exceptions import HomeAssistantError +from homeassistant.exceptions import HomeAssistantError, ServiceValidationError from homeassistant.helpers import entity_registry as er from tests.common import MockConfigEntry, snapshot_platform @@ -47,7 +45,7 @@ async def test_lock( ) async def test_lock_service_calls( hass: HomeAssistant, - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, load_config_entry: tuple[MockConfigEntry, Mock], entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, @@ -56,8 +54,8 @@ async def test_lock_service_calls( client = load_config_entry[1] - data = deepcopy(load_json) - data["data"] = data.pop("DEVICES") + data = deepcopy(get_data.cycle) + data["data"] = data.pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "000"}) @@ -66,6 +64,14 @@ async def test_lock_service_calls( state = hass.states.get("lock.device1") assert state.state == "locked" + with pytest.raises(ServiceValidationError): + await hass.services.async_call( + LOCK_DOMAIN, + SERVICE_UNLOCK, + {ATTR_ENTITY_ID: "lock.device1"}, + blocking=True, + ) + await hass.services.async_call( LOCK_DOMAIN, SERVICE_UNLOCK, @@ -93,7 +99,7 @@ async def test_lock_service_calls( ) async def test_lock_service_call_fails( hass: HomeAssistant, - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, load_config_entry: tuple[MockConfigEntry, Mock], entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, @@ -102,8 +108,8 @@ async def test_lock_service_call_fails( client = load_config_entry[1] - data = deepcopy(load_json) - data["data"] = data.pop("DEVICES") + data = deepcopy(get_data.cycle) + data["data"] = data.pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(side_effect=UnknownError("test_side_effect")) @@ -145,7 +151,7 @@ async def test_lock_service_call_fails( ) async def test_lock_service_call_fails_with_incorrect_status( hass: HomeAssistant, - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, load_config_entry: tuple[MockConfigEntry, Mock], entity_registry: er.EntityRegistry, snapshot: SnapshotAssertion, @@ -154,8 +160,8 @@ async def test_lock_service_call_fails_with_incorrect_status( client = load_config_entry[1] - data = deepcopy(load_json) - data["data"] = data.pop("DEVICES") + data = deepcopy(get_data.cycle) + data["data"] = data.pop("device_status") client.auth.get_authenticated = Mock(return_value=data) client.auth.post_authenticated = Mock(return_value={"code": "FFF"}) diff --git a/tests/components/yale_smart_alarm/test_sensor.py b/tests/components/yale_smart_alarm/test_sensor.py index d91ddc0e6ce..848d31cedc3 100644 --- a/tests/components/yale_smart_alarm/test_sensor.py +++ 
b/tests/components/yale_smart_alarm/test_sensor.py @@ -2,9 +2,10 @@ from __future__ import annotations -from typing import Any from unittest.mock import Mock +from yalesmartalarmclient import YaleSmartAlarmData + from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry @@ -13,7 +14,7 @@ from tests.common import MockConfigEntry async def test_coordinator_setup_and_update_errors( hass: HomeAssistant, load_config_entry: tuple[MockConfigEntry, Mock], - load_json: dict[str, Any], + get_data: YaleSmartAlarmData, ) -> None: """Test the Yale Smart Living coordinator with errors.""" diff --git a/tests/components/yalexs_ble/test_config_flow.py b/tests/components/yalexs_ble/test_config_flow.py index 15552fdec5f..5d57095ccd5 100644 --- a/tests/components/yalexs_ble/test_config_flow.py +++ b/tests/components/yalexs_ble/test_config_flow.py @@ -945,11 +945,7 @@ async def test_reauth(hass: HomeAssistant) -> None: unique_id=YALE_ACCESS_LOCK_DISCOVERY_INFO.address, ) entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_REAUTH, "entry_id": entry.entry_id}, - data=entry.data, - ) + result = await entry.start_reauth_flow(hass) assert result["type"] is FlowResultType.FORM assert result["step_id"] == "reauth_validate" diff --git a/tests/components/yamaha/test_media_player.py b/tests/components/yamaha/test_media_player.py index 6a5729a70b3..2375e7d07f4 100644 --- a/tests/components/yamaha/test_media_player.py +++ b/tests/components/yamaha/test_media_player.py @@ -25,7 +25,7 @@ def _create_zone_mock(name, url): class FakeYamahaDevice: """A fake Yamaha device.""" - def __init__(self, ctrl_url, name, zones=None): + def __init__(self, ctrl_url, name, zones=None) -> None: """Initialize the fake Yamaha device.""" self.ctrl_url = ctrl_url self.name = name diff --git a/tests/components/yandex_transport/test_sensor.py b/tests/components/yandex_transport/test_sensor.py index 5ad9fa92c39..13432850b2b 100644 --- a/tests/components/yandex_transport/test_sensor.py +++ b/tests/components/yandex_transport/test_sensor.py @@ -1,6 +1,7 @@ """Tests for the yandex transport platform.""" import json +from typing import Any from unittest.mock import AsyncMock, patch import pytest @@ -76,7 +77,9 @@ SUBURBAN_RESULT_STATE = dt_util.utc_from_timestamp(1634984640).isoformat( ) -async def assert_setup_sensor(hass, config, count=1): +async def assert_setup_sensor( + hass: HomeAssistant, config: dict[str, Any], count: int = 1 +) -> None: """Set up the sensor and assert it's been created.""" with assert_setup_component(count): assert await async_setup_component(hass, sensor.DOMAIN, config) diff --git a/tests/components/yandextts/test_tts.py b/tests/components/yandextts/test_tts.py index 496c187469a..77878c2be51 100644 --- a/tests/components/yandextts/test_tts.py +++ b/tests/components/yandextts/test_tts.py @@ -29,9 +29,8 @@ def tts_mutagen_mock_fixture_autouse(tts_mutagen_mock: MagicMock) -> None: @pytest.fixture(autouse=True) -def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> Path: +def mock_tts_cache_dir_autouse(mock_tts_cache_dir: Path) -> None: """Mock the TTS cache dir with empty dir.""" - return mock_tts_cache_dir async def test_setup_component(hass: HomeAssistant) -> None: diff --git a/tests/components/yeelight/__init__.py b/tests/components/yeelight/__init__.py index 2de064cf567..bdd8cdda312 100644 --- a/tests/components/yeelight/__init__.py +++ b/tests/components/yeelight/__init__.py @@ -109,7 +109,7 @@ CONFIG_ENTRY_DATA 
= {CONF_ID: ID} class MockAsyncBulb: """A mock for yeelight.aio.AsyncBulb.""" - def __init__(self, model, bulb_type, cannot_connect): + def __init__(self, model, bulb_type, cannot_connect) -> None: """Init the mock.""" self.model = model self.bulb_type = bulb_type diff --git a/tests/components/yolink/test_config_flow.py b/tests/components/yolink/test_config_flow.py index d7ba09e4269..1dd71368d73 100644 --- a/tests/components/yolink/test_config_flow.py +++ b/tests/components/yolink/test_config_flow.py @@ -172,15 +172,7 @@ async def test_reauthentication( ) old_entry.add_to_hass(hass) - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={ - "source": config_entries.SOURCE_REAUTH, - "unique_id": old_entry.unique_id, - "entry_id": old_entry.entry_id, - }, - data=old_entry.data, - ) + result = await old_entry.start_reauth_flow(hass) flows = hass.config_entries.flow.async_progress() assert len(flows) == 1 diff --git a/tests/components/yolink/test_device_trigger.py b/tests/components/yolink/test_device_trigger.py index 6b48b32fd62..c1d3a8acda8 100644 --- a/tests/components/yolink/test_device_trigger.py +++ b/tests/components/yolink/test_device_trigger.py @@ -6,6 +6,7 @@ from yolink.const import ATTR_DEVICE_DIMMER, ATTR_DEVICE_SMART_REMOTER from homeassistant.components import automation from homeassistant.components.device_automation import DeviceAutomationType from homeassistant.components.yolink import DOMAIN, YOLINK_EVENT +from homeassistant.components.yolink.const import DEV_MODEL_FLEX_FOB_YS3604_UC from homeassistant.core import HomeAssistant, ServiceCall from homeassistant.helpers import device_registry as dr from homeassistant.setup import async_setup_component @@ -23,6 +24,7 @@ async def test_get_triggers( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, model=ATTR_DEVICE_SMART_REMOTER, + model_id=DEV_MODEL_FLEX_FOB_YS3604_UC, ) expected_triggers = [ @@ -99,6 +101,7 @@ async def test_get_triggers_exception( config_entry_id=config_entry.entry_id, connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")}, model=ATTR_DEVICE_DIMMER, + model_id=None, ) expected_triggers = [] @@ -123,6 +126,7 @@ async def test_if_fires_on_event( connections={connection}, identifiers={(DOMAIN, mac_address)}, model=ATTR_DEVICE_SMART_REMOTER, + model_id=DEV_MODEL_FLEX_FOB_YS3604_UC, ) assert await async_setup_component( diff --git a/tests/components/zerproc/test_light.py b/tests/components/zerproc/test_light.py index c47f960b182..6e00cfbde4c 100644 --- a/tests/components/zerproc/test_light.py +++ b/tests/components/zerproc/test_light.py @@ -35,13 +35,13 @@ from tests.common import MockConfigEntry, async_fire_time_changed @pytest.fixture -async def mock_entry(hass): +async def mock_entry() -> MockConfigEntry: """Create a mock light entity.""" return MockConfigEntry(domain=DOMAIN) @pytest.fixture -async def mock_light(hass, mock_entry): +async def mock_light(hass: HomeAssistant, mock_entry: MockConfigEntry) -> MagicMock: """Create a mock light entity.""" mock_entry.add_to_hass(hass) diff --git a/tests/components/zeversolar/snapshots/test_sensor.ambr b/tests/components/zeversolar/snapshots/test_sensor.ambr index bee522133a5..aaef2c43d79 100644 --- a/tests/components/zeversolar/snapshots/test_sensor.ambr +++ b/tests/components/zeversolar/snapshots/test_sensor.ambr @@ -1,24 +1,4 @@ # serializer version: 1 -# name: test_sensors - ConfigEntrySnapshot({ - 'data': dict({ - 'host': 'zeversolar-fake-host', - 'port': 10200, - }), - 'disabled_by': 
None, - 'domain': 'zeversolar', - 'entry_id': , - 'minor_version': 1, - 'options': dict({ - }), - 'pref_disable_new_entities': False, - 'pref_disable_polling': False, - 'source': 'user', - 'title': 'Mock Title', - 'unique_id': None, - 'version': 1, - }) -# --- # name: test_sensors[sensor.zeversolar_sensor_energy_today-entry] EntityRegistryEntrySnapshot({ 'aliases': set({ diff --git a/tests/components/zha/snapshots/test_diagnostics.ambr b/tests/components/zha/snapshots/test_diagnostics.ambr new file mode 100644 index 00000000000..e0da54e2492 --- /dev/null +++ b/tests/components/zha/snapshots/test_diagnostics.ambr @@ -0,0 +1,292 @@ +# serializer version: 1 +# name: test_diagnostics_for_config_entry + dict({ + 'application_state': dict({ + 'broadcast_counters': dict({ + }), + 'counters': dict({ + 'ezsp_counters': dict({ + 'counter_1': dict({ + '__type': "", + 'repr': "Counter(name='counter_1', _raw_value=1, reset_count=0, _last_reset_value=0)", + }), + 'counter_2': dict({ + '__type': "", + 'repr': "Counter(name='counter_2', _raw_value=1, reset_count=0, _last_reset_value=0)", + }), + 'counter_3': dict({ + '__type': "", + 'repr': "Counter(name='counter_3', _raw_value=1, reset_count=0, _last_reset_value=0)", + }), + }), + }), + 'device_counters': dict({ + }), + 'group_counters': dict({ + }), + 'network_info': dict({ + 'channel': 15, + 'channel_mask': 0, + 'children': list([ + ]), + 'extended_pan_id': '**REDACTED**', + 'key_table': list([ + ]), + 'metadata': dict({ + }), + 'network_key': '**REDACTED**', + 'nwk_addresses': dict({ + }), + 'nwk_manager_id': 0, + 'nwk_update_id': 0, + 'pan_id': 4660, + 'security_level': 0, + 'source': None, + 'stack_specific': dict({ + }), + 'tc_link_key': dict({ + 'key': list([ + 90, + 105, + 103, + 66, + 101, + 101, + 65, + 108, + 108, + 105, + 97, + 110, + 99, + 101, + 48, + 57, + ]), + 'partner_ieee': '**REDACTED**', + 'rx_counter': 0, + 'seq': 0, + 'tx_counter': 0, + }), + }), + 'node_info': dict({ + 'ieee': '**REDACTED**', + 'logical_type': 2, + 'manufacturer': 'Coordinator Manufacturer', + 'model': 'Coordinator Model', + 'nwk': 0, + 'version': None, + }), + }), + 'config': dict({ + 'device_config': dict({ + }), + 'enable_quirks': False, + }), + 'config_entry': dict({ + 'data': dict({ + 'device': dict({ + 'baudrate': 115200, + 'flow_control': 'hardware', + 'path': '/dev/ttyUSB0', + }), + 'radio_type': 'ezsp', + }), + 'disabled_by': None, + 'domain': 'zha', + 'minor_version': 1, + 'options': dict({ + 'custom_configuration': dict({ + 'zha_alarm_options': dict({ + 'alarm_arm_requires_code': False, + 'alarm_failed_tries': 2, + 'alarm_master_code': '**REDACTED**', + }), + 'zha_options': dict({ + 'enhanced_light_transition': True, + 'group_members_assume_state': False, + }), + }), + }), + 'pref_disable_new_entities': False, + 'pref_disable_polling': False, + 'source': 'user', + 'title': 'Mock Title', + 'unique_id': None, + 'version': 4, + }), + 'devices': list([ + dict({ + 'logical_type': 'Coordinator', + 'manufacturer': 'Coordinator Manufacturer', + 'model': 'Coordinator Model', + }), + dict({ + 'logical_type': 'EndDevice', + 'manufacturer': 'FakeManufacturer', + 'model': 'FakeModel', + }), + ]), + 'energy_scan': dict({ + '11': 4.313725490196078, + '12': 4.705882352941177, + '13': 5.098039215686274, + '14': 5.490196078431373, + '15': 5.882352941176471, + '16': 6.2745098039215685, + '17': 6.666666666666667, + '18': 7.0588235294117645, + '19': 7.450980392156863, + '20': 7.8431372549019605, + '21': 8.235294117647058, + '22': 8.627450980392156, + '23': 
9.019607843137255, + '24': 9.411764705882353, + '25': 9.803921568627452, + '26': 10.196078431372548, + }), + }) +# --- +# name: test_diagnostics_for_device + dict({ + 'active_coordinator': False, + 'area_id': None, + 'available': True, + 'cluster_details': dict({ + '1': dict({ + 'device_type': dict({ + 'id': 1025, + 'name': 'IAS_ANCILLARY_CONTROL', + }), + 'in_clusters': dict({ + '0x0500': dict({ + 'attributes': dict({ + '0x0000': dict({ + 'attribute': "ZCLAttributeDef(id=0x0000, name='zone_state', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0001': dict({ + 'attribute': "ZCLAttributeDef(id=0x0001, name='zone_type', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0002': dict({ + 'attribute': "ZCLAttributeDef(id=0x0002, name='zone_status', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0010': dict({ + 'attribute': "ZCLAttributeDef(id=0x0010, name='cie_addr', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': list([ + 50, + 79, + 50, + 2, + 0, + 141, + 21, + 0, + ]), + }), + '0x0011': dict({ + 'attribute': "ZCLAttributeDef(id=0x0011, name='zone_id', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0012': dict({ + 'attribute': "ZCLAttributeDef(id=0x0012, name='num_zone_sensitivity_levels_supported', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'value': None, + }), + '0x0013': dict({ + 'attribute': "ZCLAttributeDef(id=0x0013, name='current_zone_sensitivity_level', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'value': None, + }), + }), + 'endpoint_attribute': 'ias_zone', + 'unsupported_attributes': list([ + 18, + 'current_zone_sensitivity_level', + ]), + }), + '0x0501': dict({ + 'attributes': dict({ + '0xfffd': dict({ + 'attribute': "ZCLAttributeDef(id=0xFFFD, name='cluster_revision', type=, zcl_type=, access=, mandatory=True, is_manufacturer_specific=False)", + 'value': None, + }), + '0xfffe': dict({ + 'attribute': "ZCLAttributeDef(id=0xFFFE, name='reporting_status', type=, zcl_type=, access=, mandatory=False, is_manufacturer_specific=False)", + 'value': None, + }), + }), + 'endpoint_attribute': 'ias_ace', + 'unsupported_attributes': list([ + 4096, + 'unknown_attribute_name', + ]), + }), + }), + 'out_clusters': dict({ + }), + 'profile_id': 260, + }), + }), + 'device_type': 'EndDevice', + 'endpoint_names': list([ + dict({ + 'name': 'IAS_ANCILLARY_CONTROL', + }), + ]), + 'entities': list([ + dict({ + 'entity_id': 'alarm_control_panel.fakemanufacturer_fakemodel_alarm_control_panel', + 'name': 'FakeManufacturer FakeModel', + }), + ]), + 'ieee': '**REDACTED**', + 'lqi': None, + 'manufacturer': 'FakeManufacturer', + 'manufacturer_code': 4098, + 'model': 'FakeModel', + 'name': 'FakeManufacturer FakeModel', + 'neighbors': list([ + ]), + 'nwk': 47004, + 'power_source': 'Mains', + 'quirk_applied': False, + 'quirk_class': 'zigpy.device.Device', + 'quirk_id': None, + 'routes': list([ + ]), + 'rssi': None, + 'signature': dict({ + 'endpoints': dict({ + '1': dict({ + 'device_type': '0x0401', + 'input_clusters': list([ + '0x0500', + '0x0501', + ]), + 'output_clusters': list([ + ]), + 'profile_id': '0x0104', + }), + }), + 'manufacturer': 'FakeManufacturer', + 'model': 'FakeModel', + 'node_descriptor': dict({ + 'aps_flags': 0, + 'complex_descriptor_available': 0, + 
'descriptor_capability_field': 0, + 'frequency_band': 8, + 'logical_type': 2, + 'mac_capability_flags': 140, + 'manufacturer_code': 4098, + 'maximum_buffer_size': 82, + 'maximum_incoming_transfer_size': 82, + 'maximum_outgoing_transfer_size': 82, + 'reserved': 0, + 'server_mask': 0, + 'user_descriptor_available': 0, + }), + }), + 'user_given_name': None, + }) +# --- diff --git a/tests/components/zha/test_config_flow.py b/tests/components/zha/test_config_flow.py index f3104141269..af6f2d9af0c 100644 --- a/tests/components/zha/test_config_flow.py +++ b/tests/components/zha/test_config_flow.py @@ -1,14 +1,16 @@ """Tests for ZHA config flow.""" +from collections.abc import Callable, Coroutine, Generator import copy from datetime import timedelta from ipaddress import ip_address import json +from typing import Any from unittest.mock import AsyncMock, MagicMock, PropertyMock, create_autospec, patch import uuid import pytest -import serial.tools.list_ports +from serial.tools.list_ports_common import ListPortInfo from zha.application.const import RadioType from zigpy.backups import BackupManager import zigpy.config @@ -36,6 +38,7 @@ from homeassistant.config_entries import ( SOURCE_USER, SOURCE_ZEROCONF, ConfigEntryState, + ConfigFlowResult, ) from homeassistant.const import CONF_SOURCE from homeassistant.core import HomeAssistant @@ -43,6 +46,9 @@ from homeassistant.data_entry_flow import FlowResultType from tests.common import MockConfigEntry +type RadioPicker = Callable[ + [RadioType], Coroutine[Any, Any, tuple[ConfigFlowResult, ListPortInfo]] +] PROBE_FUNCTION_PATH = "zigbee.application.ControllerApplication.probe" @@ -70,7 +76,7 @@ def mock_multipan_platform(): @pytest.fixture(autouse=True) -def mock_app(): +def mock_app() -> Generator[AsyncMock]: """Mock zigpy app interface.""" mock_app = AsyncMock() mock_app.backups = create_autospec(BackupManager, instance=True) @@ -130,9 +136,9 @@ def mock_detect_radio_type( return detect -def com_port(device="/dev/ttyUSB1234"): +def com_port(device="/dev/ttyUSB1234") -> ListPortInfo: """Mock of a serial port.""" - port = serial.tools.list_ports_common.ListPortInfo("/dev/ttyUSB1234") + port = ListPortInfo("/dev/ttyUSB1234") port.serial_number = "1234" port.manufacturer = "Virtual serial port" port.device = device @@ -1038,10 +1044,12 @@ def test_prevent_overwrite_ezsp_ieee() -> None: @pytest.fixture -def pick_radio(hass): +def pick_radio( + hass: HomeAssistant, +) -> Generator[RadioPicker]: """Fixture for the first step of the config flow (where a radio is picked).""" - async def wrapper(radio_type): + async def wrapper(radio_type: RadioType) -> tuple[ConfigFlowResult, ListPortInfo]: port = com_port() port_select = f"{port}, s/n: {port.serial_number} - {port.manufacturer}" @@ -1070,7 +1078,7 @@ def pick_radio(hass): async def test_strategy_no_network_settings( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test formation strategy when no network settings are present.""" mock_app.load_network_info = MagicMock(side_effect=NetworkNotFormed()) @@ -1083,7 +1091,7 @@ async def test_strategy_no_network_settings( async def test_formation_strategy_form_new_network( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test forming a new network.""" result, port = await pick_radio(RadioType.ezsp) @@ -1101,7 +1109,7 @@ async def test_formation_strategy_form_new_network( async def 
test_formation_strategy_form_initial_network( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test forming a new network, with no previous settings on the radio.""" mock_app.load_network_info = AsyncMock(side_effect=NetworkNotFormed()) @@ -1122,7 +1130,7 @@ async def test_formation_strategy_form_initial_network( @patch(f"zigpy_znp.{PROBE_FUNCTION_PATH}", AsyncMock(return_value=True)) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_onboarding_auto_formation_new_hardware( - mock_app, hass: HomeAssistant + mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test auto network formation with new hardware during onboarding.""" mock_app.load_network_info = AsyncMock(side_effect=NetworkNotFormed()) @@ -1157,7 +1165,7 @@ async def test_onboarding_auto_formation_new_hardware( async def test_formation_strategy_reuse_settings( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test reusing existing network settings.""" result, port = await pick_radio(RadioType.ezsp) @@ -1190,7 +1198,10 @@ def test_parse_uploaded_backup(process_mock) -> None: @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_non_ezsp( - allow_overwrite_ieee_mock, pick_radio, mock_app, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + hass: HomeAssistant, ) -> None: """Test restoring a manual backup on non-EZSP coordinators.""" result, port = await pick_radio(RadioType.znp) @@ -1222,7 +1233,11 @@ async def test_formation_strategy_restore_manual_backup_non_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_overwrite_ieee_ezsp( - allow_overwrite_ieee_mock, pick_radio, mock_app, backup, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + backup, + hass: HomeAssistant, ) -> None: """Test restoring a manual backup on EZSP coordinators (overwrite IEEE).""" result, port = await pick_radio(RadioType.ezsp) @@ -1262,7 +1277,10 @@ async def test_formation_strategy_restore_manual_backup_overwrite_ieee_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_formation_strategy_restore_manual_backup_ezsp( - allow_overwrite_ieee_mock, pick_radio, mock_app, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + hass: HomeAssistant, ) -> None: """Test restoring a manual backup on EZSP coordinators (don't overwrite IEEE).""" result, port = await pick_radio(RadioType.ezsp) @@ -1303,7 +1321,7 @@ async def test_formation_strategy_restore_manual_backup_ezsp( async def test_formation_strategy_restore_manual_backup_invalid_upload( - pick_radio, mock_app, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, hass: HomeAssistant ) -> None: """Test restoring a manual backup but an invalid file is uploaded.""" result, port = await pick_radio(RadioType.ezsp) @@ -1355,7 +1373,7 @@ def test_format_backup_choice() -> None: ) @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) async def test_formation_strategy_restore_automatic_backup_ezsp( - pick_radio, mock_app, make_backup, hass: HomeAssistant + pick_radio: RadioPicker, mock_app: AsyncMock, 
make_backup, hass: HomeAssistant ) -> None: """Test restoring an automatic backup (EZSP radio).""" mock_app.backups.backups = [ @@ -1404,7 +1422,11 @@ async def test_formation_strategy_restore_automatic_backup_ezsp( @patch("homeassistant.components.zha.async_setup_entry", AsyncMock(return_value=True)) @pytest.mark.parametrize("is_advanced", [True, False]) async def test_formation_strategy_restore_automatic_backup_non_ezsp( - is_advanced, pick_radio, mock_app, make_backup, hass: HomeAssistant + is_advanced, + pick_radio: RadioPicker, + mock_app: AsyncMock, + make_backup, + hass: HomeAssistant, ) -> None: """Test restoring an automatic backup (non-EZSP radio).""" mock_app.backups.backups = [ @@ -1457,7 +1479,11 @@ async def test_formation_strategy_restore_automatic_backup_non_ezsp( @patch("homeassistant.components.zha.radio_manager._allow_overwrite_ezsp_ieee") async def test_ezsp_restore_without_settings_change_ieee( - allow_overwrite_ieee_mock, pick_radio, mock_app, backup, hass: HomeAssistant + allow_overwrite_ieee_mock, + pick_radio: RadioPicker, + mock_app: AsyncMock, + backup, + hass: HomeAssistant, ) -> None: """Test a manual backup on EZSP coordinators without settings (no IEEE write).""" # Fail to load settings diff --git a/tests/components/zha/test_diagnostics.py b/tests/components/zha/test_diagnostics.py index bbdc6271207..ed3f83c0c36 100644 --- a/tests/components/zha/test_diagnostics.py +++ b/tests/components/zha/test_diagnostics.py @@ -3,11 +3,11 @@ from unittest.mock import patch import pytest +from syrupy.assertion import SnapshotAssertion +from syrupy.filters import props from zigpy.profiles import zha from zigpy.zcl.clusters import security -from homeassistant.components.diagnostics import REDACTED -from homeassistant.components.zha.diagnostics import KEYS_TO_REDACT from homeassistant.components.zha.helpers import ( ZHADeviceProxy, ZHAGatewayProxy, @@ -27,14 +27,6 @@ from tests.components.diagnostics import ( ) from tests.typing import ClientSessionGenerator -CONFIG_ENTRY_DIAGNOSTICS_KEYS = [ - "config", - "config_entry", - "application_state", - "versions", - "devices", -] - @pytest.fixture(autouse=True) def required_platforms_only(): @@ -51,6 +43,7 @@ async def test_diagnostics_for_config_entry( config_entry: MockConfigEntry, setup_zha, zigpy_device_mock, + snapshot: SnapshotAssertion, ) -> None: """Test diagnostics for config entry.""" @@ -81,30 +74,9 @@ async def test_diagnostics_for_config_entry( hass, hass_client, config_entry ) - for key in CONFIG_ENTRY_DIAGNOSTICS_KEYS: - assert key in diagnostics_data - assert diagnostics_data[key] is not None - - # Energy scan results are presented as a percentage. JSON object keys also must be - # strings, not integers. 
- assert diagnostics_data["energy_scan"] == { - str(k): 100 * v / 255 for k, v in scan.items() - } - - assert isinstance(diagnostics_data["devices"], list) - assert len(diagnostics_data["devices"]) == 2 - assert diagnostics_data["devices"] == [ - { - "manufacturer": "Coordinator Manufacturer", - "model": "Coordinator Model", - "logical_type": "Coordinator", - }, - { - "manufacturer": "FakeManufacturer", - "model": "FakeModel", - "logical_type": "EndDevice", - }, - ] + assert diagnostics_data == snapshot( + exclude=props("created_at", "modified_at", "entry_id", "versions") + ) async def test_diagnostics_for_device( @@ -114,6 +86,7 @@ async def test_diagnostics_for_device( config_entry: MockConfigEntry, setup_zha, zigpy_device_mock, + snapshot: SnapshotAssertion, ) -> None: """Test diagnostics for device.""" await setup_zha() @@ -161,11 +134,5 @@ async def test_diagnostics_for_device( diagnostics_data = await get_diagnostics_for_device( hass, hass_client, config_entry, device ) - assert diagnostics_data - device_info: dict = zha_device_proxy.zha_device_info - for key in device_info: - assert key in diagnostics_data - if key not in KEYS_TO_REDACT: - assert key in diagnostics_data - else: - assert diagnostics_data[key] == REDACTED + + assert diagnostics_data == snapshot(exclude=props("device_reg_id", "last_seen")) diff --git a/tests/components/zha/test_helpers.py b/tests/components/zha/test_helpers.py index 13c03c17cf7..d3392685437 100644 --- a/tests/components/zha/test_helpers.py +++ b/tests/components/zha/test_helpers.py @@ -60,16 +60,14 @@ async def test_zcl_schema_conversions(hass: HomeAssistant) -> None: "required": True, }, { - "type": "integer", - "valueMin": 0, - "valueMax": 255, + "type": "multi_select", + "options": ["Execute if off present"], "name": "options_mask", "optional": True, }, { - "type": "integer", - "valueMin": 0, - "valueMax": 255, + "type": "multi_select", + "options": ["Execute if off"], "name": "options_override", "optional": True, }, diff --git a/tests/components/zha/test_update.py b/tests/components/zha/test_update.py index 6a1a19b407f..e2a614915f9 100644 --- a/tests/components/zha/test_update.py +++ b/tests/components/zha/test_update.py @@ -3,8 +3,11 @@ from unittest.mock import AsyncMock, call, patch import pytest +from zha.application.platforms.update import ( + FirmwareUpdateEntity as ZhaFirmwareUpdateEntity, +) from zigpy.exceptions import DeliveryError -from zigpy.ota import OtaImageWithMetadata +from zigpy.ota import OtaImagesResult, OtaImageWithMetadata import zigpy.ota.image as firmware from zigpy.ota.providers import BaseOtaImageMetadata from zigpy.profiles import zha @@ -43,6 +46,8 @@ from homeassistant.setup import async_setup_component from .common import find_entity_id, update_attribute_cache from .conftest import SIG_EP_INPUT, SIG_EP_OUTPUT, SIG_EP_PROFILE, SIG_EP_TYPE +from tests.typing import WebSocketGenerator + @pytest.fixture(autouse=True) def update_platform_only(): @@ -119,8 +124,11 @@ async def setup_test_data( ), ) - cluster.endpoint.device.application.ota.get_ota_image = AsyncMock( - return_value=None if file_not_found else fw_image + cluster.endpoint.device.application.ota.get_ota_images = AsyncMock( + return_value=OtaImagesResult( + upgrades=() if file_not_found else (fw_image,), + downgrades=(), + ) ) zha_device_proxy: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) zha_device_proxy.device.async_update_sw_build_id(installed_fw_version) @@ -544,81 +552,56 @@ async def test_firmware_update_raises( ) -async def 
test_firmware_update_no_longer_compatible( +async def test_update_release_notes( hass: HomeAssistant, + hass_ws_client: WebSocketGenerator, setup_zha, zigpy_device_mock, ) -> None: - """Test ZHA update platform - firmware update is no longer valid.""" + """Test ZHA update platform release notes.""" await setup_zha() - zha_device, cluster, fw_image, installed_fw_version = await setup_test_data( - hass, zigpy_device_mock + + gateway = get_zha_gateway(hass) + gateway_proxy: ZHAGatewayProxy = get_zha_gateway_proxy(hass) + + zigpy_device = zigpy_device_mock( + { + 1: { + SIG_EP_INPUT: [general.Basic.cluster_id, general.OnOff.cluster_id], + SIG_EP_OUTPUT: [general.Ota.cluster_id], + SIG_EP_TYPE: zha.DeviceType.ON_OFF_SWITCH, + SIG_EP_PROFILE: zha.PROFILE_ID, + } + }, + node_descriptor=b"\x02@\x84_\x11\x7fd\x00\x00,d\x00\x00", ) + gateway.get_or_create_device(zigpy_device) + await gateway.async_device_initialized(zigpy_device) + await hass.async_block_till_done(wait_background_tasks=True) + + zha_device: ZHADeviceProxy = gateway_proxy.get_device_proxy(zigpy_device.ieee) + zha_lib_entity = next( + e + for e in zha_device.device.platform_entities.values() + if isinstance(e, ZhaFirmwareUpdateEntity) + ) + zha_lib_entity._attr_release_notes = "Some lengthy release notes" + zha_lib_entity.maybe_emit_state_changed_event() + await hass.async_block_till_done() + entity_id = find_entity_id(Platform.UPDATE, zha_device, hass) assert entity_id is not None - assert hass.states.get(entity_id).state == STATE_UNKNOWN - - # simulate an image available notification - await cluster._handle_query_next_image( - foundation.ZCLHeader.cluster( - tsn=0x12, command_id=general.Ota.ServerCommandDefs.query_next_image.id - ), - general.QueryNextImageCommand( - fw_image.firmware.header.field_control, - zha_device.device.manufacturer_code, - fw_image.firmware.header.image_type, - installed_fw_version, - fw_image.firmware.header.header_version, - ), + ws_client = await hass_ws_client(hass) + await ws_client.send_json( + { + "id": 1, + "type": "update/release_notes", + "entity_id": entity_id, + } ) - await hass.async_block_till_done() - state = hass.states.get(entity_id) - assert state.state == STATE_ON - attrs = state.attributes - assert attrs[ATTR_INSTALLED_VERSION] == f"0x{installed_fw_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] - assert ( - attrs[ATTR_LATEST_VERSION] == f"0x{fw_image.firmware.header.file_version:08x}" - ) - - new_version = 0x99999999 - - async def endpoint_reply(cluster_id, tsn, data, command_id): - if cluster_id == general.Ota.cluster_id: - hdr, cmd = cluster.deserialize(data) - if isinstance(cmd, general.Ota.ImageNotifyCommand): - zha_device.device.device.packet_received( - make_packet( - zha_device.device.device, - cluster, - general.Ota.ServerCommandDefs.query_next_image.name, - field_control=general.Ota.QueryNextImageCommand.FieldControl.HardwareVersion, - manufacturer_code=fw_image.firmware.header.manufacturer_id, - image_type=fw_image.firmware.header.image_type, - # The device reports that it is no longer compatible! 
- current_file_version=new_version, - hardware_version=1, - ) - ) - - cluster.endpoint.reply = AsyncMock(side_effect=endpoint_reply) - with pytest.raises(HomeAssistantError): - await hass.services.async_call( - UPDATE_DOMAIN, - SERVICE_INSTALL, - { - ATTR_ENTITY_ID: entity_id, - }, - blocking=True, - ) - - # We updated the currently installed firmware version, as it is no longer valid - state = hass.states.get(entity_id) - assert state.state == STATE_OFF - attrs = state.attributes - assert attrs[ATTR_INSTALLED_VERSION] == f"0x{new_version:08x}" - assert not attrs[ATTR_IN_PROGRESS] - assert attrs[ATTR_LATEST_VERSION] == f"0x{new_version:08x}" + result = await ws_client.receive_json() + assert result["success"] is True + assert result["result"] == "Some lengthy release notes" diff --git a/tests/components/zwave_js/conftest.py b/tests/components/zwave_js/conftest.py index 60deb7dbce8..a6bbe554f9a 100644 --- a/tests/components/zwave_js/conftest.py +++ b/tests/components/zwave_js/conftest.py @@ -1,11 +1,9 @@ """Provide common Z-Wave JS fixtures.""" import asyncio -from collections.abc import Generator import copy import io import json -from typing import Any from unittest.mock import DEFAULT, AsyncMock, patch import pytest @@ -18,242 +16,6 @@ from homeassistant.core import HomeAssistant from tests.common import MockConfigEntry, load_fixture -# Add-on fixtures - - -@pytest.fixture(name="addon_info_side_effect") -def addon_info_side_effect_fixture() -> Any | None: - """Return the add-on info side effect.""" - return None - - -@pytest.fixture(name="addon_info") -def mock_addon_info(addon_info_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_info", - side_effect=addon_info_side_effect, - ) as addon_info: - addon_info.return_value = { - "available": False, - "hostname": None, - "options": {}, - "state": None, - "update_available": False, - "version": None, - } - yield addon_info - - -@pytest.fixture(name="addon_store_info_side_effect") -def addon_store_info_side_effect_fixture() -> Any | None: - """Return the add-on store info side effect.""" - return None - - -@pytest.fixture(name="addon_store_info") -def mock_addon_store_info( - addon_store_info_side_effect: Any | None, -) -> Generator[AsyncMock]: - """Mock Supervisor add-on info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_store_info", - side_effect=addon_store_info_side_effect, - ) as addon_store_info: - addon_store_info.return_value = { - "available": False, - "installed": None, - "state": None, - "version": "1.0.0", - } - yield addon_store_info - - -@pytest.fixture(name="addon_running") -def mock_addon_running(addon_store_info: AsyncMock, addon_info: AsyncMock) -> AsyncMock: - """Mock add-on already running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "started" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_installed") -def mock_addon_installed( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on already installed but not running.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - 
addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - return addon_info - - -@pytest.fixture(name="addon_not_installed") -def mock_addon_not_installed( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> AsyncMock: - """Mock add-on not installed.""" - addon_store_info.return_value["available"] = True - return addon_info - - -@pytest.fixture(name="addon_options") -def mock_addon_options(addon_info: AsyncMock): - """Mock add-on options.""" - return addon_info.return_value["options"] - - -@pytest.fixture(name="set_addon_options_side_effect") -def set_addon_options_side_effect_fixture( - addon_options: dict[str, Any], -) -> Any | None: - """Return the set add-on options side effect.""" - - async def set_addon_options(hass: HomeAssistant, slug: str, options: dict) -> None: - """Mock set add-on options.""" - addon_options.update(options["options"]) - - return set_addon_options - - -@pytest.fixture(name="set_addon_options") -def mock_set_addon_options( - set_addon_options_side_effect: Any | None, -) -> Generator[AsyncMock]: - """Mock set add-on options.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_set_addon_options", - side_effect=set_addon_options_side_effect, - ) as set_options: - yield set_options - - -@pytest.fixture(name="install_addon_side_effect") -def install_addon_side_effect_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Any | None: - """Return the install add-on side effect.""" - - async def install_addon(hass: HomeAssistant, slug): - """Mock install add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "stopped", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "stopped" - addon_info.return_value["version"] = "1.0.0" - - return install_addon - - -@pytest.fixture(name="install_addon") -def mock_install_addon(install_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock install add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_install_addon", - side_effect=install_addon_side_effect, - ) as install_addon: - yield install_addon - - -@pytest.fixture(name="update_addon") -def mock_update_addon() -> Generator[AsyncMock]: - """Mock update add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_update_addon" - ) as update_addon: - yield update_addon - - -@pytest.fixture(name="start_addon_side_effect") -def start_addon_side_effect_fixture( - addon_store_info: AsyncMock, addon_info: AsyncMock -) -> Any | None: - """Return the start add-on options side effect.""" - - async def start_addon(hass: HomeAssistant, slug): - """Mock start add-on.""" - addon_store_info.return_value = { - "available": True, - "installed": "1.0.0", - "state": "started", - "version": "1.0.0", - } - addon_info.return_value["available"] = True - addon_info.return_value["state"] = "started" - - return start_addon - - -@pytest.fixture(name="start_addon") -def mock_start_addon(start_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock start add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_start_addon", - side_effect=start_addon_side_effect, - ) as start_addon: - yield start_addon - - -@pytest.fixture(name="stop_addon") -def stop_addon_fixture() -> Generator[AsyncMock]: - """Mock stop add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_stop_addon" - ) as stop_addon: - yield stop_addon - - 
-@pytest.fixture(name="restart_addon_side_effect") -def restart_addon_side_effect_fixture() -> Any | None: - """Return the restart add-on options side effect.""" - return None - - -@pytest.fixture(name="restart_addon") -def mock_restart_addon(restart_addon_side_effect: Any | None) -> Generator[AsyncMock]: - """Mock restart add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_restart_addon", - side_effect=restart_addon_side_effect, - ) as restart_addon: - yield restart_addon - - -@pytest.fixture(name="uninstall_addon") -def uninstall_addon_fixture() -> Generator[AsyncMock]: - """Mock uninstall add-on.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_uninstall_addon" - ) as uninstall_addon: - yield uninstall_addon - - -@pytest.fixture(name="create_backup") -def create_backup_fixture() -> Generator[AsyncMock]: - """Mock create backup.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_create_backup" - ) as create_backup: - yield create_backup - - # State fixtures diff --git a/tests/components/zwave_js/test_config_flow.py b/tests/components/zwave_js/test_config_flow.py index 46172f72b2f..a3affb6b977 100644 --- a/tests/components/zwave_js/test_config_flow.py +++ b/tests/components/zwave_js/test_config_flow.py @@ -5,7 +5,7 @@ from collections.abc import Generator from copy import copy from ipaddress import ip_address from typing import Any -from unittest.mock import DEFAULT, AsyncMock, MagicMock, call, patch +from unittest.mock import AsyncMock, MagicMock, call, patch import aiohttp import pytest @@ -77,31 +77,6 @@ def mock_supervisor_fixture() -> Generator[None]: yield -@pytest.fixture(name="discovery_info") -def discovery_info_fixture() -> dict[str, Any]: - """Return the discovery info from the supervisor.""" - return DEFAULT - - -@pytest.fixture(name="discovery_info_side_effect") -def discovery_info_side_effect_fixture() -> Any | None: - """Return the discovery info from the supervisor.""" - return None - - -@pytest.fixture(name="get_addon_discovery_info") -def mock_get_addon_discovery_info( - discovery_info: dict[str, Any], discovery_info_side_effect: Any | None -) -> Generator[AsyncMock]: - """Mock get add-on discovery info.""" - with patch( - "homeassistant.components.hassio.addon_manager.async_get_addon_discovery_info", - side_effect=discovery_info_side_effect, - return_value=discovery_info, - ) as get_addon_discovery_info: - yield get_addon_discovery_info - - @pytest.fixture(name="server_version_side_effect") def server_version_side_effect_fixture() -> Any | None: """Return the server version side effect.""" @@ -2751,104 +2726,6 @@ async def test_options_addon_not_installed( assert client.disconnect.call_count == 1 -@pytest.mark.parametrize("discovery_info", [{"config": ADDON_DISCOVERY_INFO}]) -async def test_import_addon_installed( - hass: HomeAssistant, - supervisor, - addon_installed, - addon_options, - set_addon_options, - start_addon, - get_addon_discovery_info, - serial_port, -) -> None: - """Test import step while add-on already installed on Supervisor.""" - serial_port.device = "/test/imported" - result = await hass.config_entries.flow.async_init( - DOMAIN, - context={"source": config_entries.SOURCE_IMPORT}, - data={"usb_path": "/test/imported", "network_key": "imported123"}, - ) - - assert result["type"] is FlowResultType.FORM - assert result["step_id"] == "on_supervisor" - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], {"use_addon": True} - ) - - assert result["type"] is 
FlowResultType.FORM - assert result["step_id"] == "configure_addon" - - # the default input should be the imported data - default_input = result["data_schema"]({}) - - assert default_input == { - "usb_path": "/test/imported", - "s0_legacy_key": "imported123", - "s2_access_control_key": "", - "s2_authenticated_key": "", - "s2_unauthenticated_key": "", - "lr_s2_access_control_key": "", - "lr_s2_authenticated_key": "", - } - - result = await hass.config_entries.flow.async_configure( - result["flow_id"], default_input - ) - - assert set_addon_options.call_args == call( - hass, - "core_zwave_js", - { - "options": { - "device": "/test/imported", - "s0_legacy_key": "imported123", - "s2_access_control_key": "", - "s2_authenticated_key": "", - "s2_unauthenticated_key": "", - "lr_s2_access_control_key": "", - "lr_s2_authenticated_key": "", - } - }, - ) - - assert result["type"] is FlowResultType.SHOW_PROGRESS - assert result["step_id"] == "start_addon" - - with ( - patch( - "homeassistant.components.zwave_js.async_setup", return_value=True - ) as mock_setup, - patch( - "homeassistant.components.zwave_js.async_setup_entry", - return_value=True, - ) as mock_setup_entry, - ): - await hass.async_block_till_done() - result = await hass.config_entries.flow.async_configure(result["flow_id"]) - await hass.async_block_till_done() - - assert start_addon.call_args == call(hass, "core_zwave_js") - - assert result["type"] is FlowResultType.CREATE_ENTRY - assert result["title"] == TITLE - assert result["data"] == { - "url": "ws://host1:3001", - "usb_path": "/test/imported", - "s0_legacy_key": "imported123", - "s2_access_control_key": "", - "s2_authenticated_key": "", - "s2_unauthenticated_key": "", - "lr_s2_access_control_key": "", - "lr_s2_authenticated_key": "", - "use_addon": True, - "integration_created_addon": False, - } - assert len(mock_setup.mock_calls) == 1 - assert len(mock_setup_entry.mock_calls) == 1 - - async def test_zeroconf(hass: HomeAssistant) -> None: """Test zeroconf discovery.""" diff --git a/tests/conftest.py b/tests/conftest.py index 0667edf4be2..df183f955cb 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -84,7 +84,7 @@ from homeassistant.helpers.translation import _TranslationsCacheData from homeassistant.helpers.typing import ConfigType from homeassistant.setup import async_setup_component from homeassistant.util import dt as dt_util, location -from homeassistant.util.async_ import create_eager_task +from homeassistant.util.async_ import create_eager_task, get_scheduled_timer_handles from homeassistant.util.json import json_loads from .ignore_uncaught_exceptions import IGNORE_UNCAUGHT_EXCEPTIONS @@ -372,7 +372,7 @@ def verify_cleanup( if tasks: event_loop.run_until_complete(asyncio.wait(tasks)) - for handle in event_loop._scheduled: # type: ignore[attr-defined] + for handle in get_scheduled_timer_handles(event_loop): if not handle.cancelled(): with long_repr_strings(): if expected_lingering_timers: @@ -1262,6 +1262,16 @@ def enable_statistics() -> bool: return False +@pytest.fixture +def enable_missing_statistics() -> bool: + """Fixture to control enabling of recorder's statistics compilation. + + To enable statistics, tests can be marked with: + @pytest.mark.parametrize("enable_missing_statistics", [True]) + """ + return False + + @pytest.fixture def enable_schema_validation() -> bool: """Fixture to control enabling of recorder's statistics table validation. 
@@ -1453,6 +1463,7 @@ async def async_test_recorder( recorder_db_url: str, enable_nightly_purge: bool, enable_statistics: bool, + enable_missing_statistics: bool, enable_schema_validation: bool, enable_migrate_context_ids: bool, enable_migrate_event_type_ids: bool, @@ -1511,7 +1522,7 @@ async def async_test_recorder( ) compile_missing = ( recorder.Recorder._schedule_compile_missing_statistics - if enable_statistics + if enable_missing_statistics else None ) migrate_states_context_ids = ( diff --git a/tests/hassfest/test_requirements.py b/tests/hassfest/test_requirements.py index f3b008a6113..433e63d904c 100644 --- a/tests/hassfest/test_requirements.py +++ b/tests/hassfest/test_requirements.py @@ -4,7 +4,7 @@ from pathlib import Path import pytest -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration from script.hassfest.requirements import validate_requirements_format @@ -13,6 +13,13 @@ def integration(): """Fixture for hassfest integration model.""" return Integration( path=Path("homeassistant/components/test"), + _config=Config( + root=Path(".").absolute(), + specific_integrations=None, + action="validate", + requirements=True, + core_integrations_path=Path("homeassistant/components"), + ), _manifest={ "domain": "test", "documentation": "https://example.com", diff --git a/tests/hassfest/test_version.py b/tests/hassfest/test_version.py index bfe15018fe2..30677356101 100644 --- a/tests/hassfest/test_version.py +++ b/tests/hassfest/test_version.py @@ -1,5 +1,7 @@ """Tests for hassfest version.""" +from pathlib import Path + import pytest import voluptuous as vol @@ -7,13 +9,22 @@ from script.hassfest.manifest import ( CUSTOM_INTEGRATION_MANIFEST_SCHEMA, validate_version, ) -from script.hassfest.model import Integration +from script.hassfest.model import Config, Integration @pytest.fixture def integration(): """Fixture for hassfest integration model.""" - integration = Integration("") + integration = Integration( + "", + _config=Config( + root=Path(".").absolute(), + specific_integrations=None, + action="validate", + requirements=True, + core_integrations_path=Path("homeassistant/components"), + ), + ) integration._manifest = { "domain": "test", "documentation": "https://example.com", diff --git a/tests/helpers/test_area_registry.py b/tests/helpers/test_area_registry.py index ad571ac50cc..da1947adbc8 100644 --- a/tests/helpers/test_area_registry.py +++ b/tests/helpers/test_area_registry.py @@ -242,9 +242,12 @@ async def test_update_area_with_same_name_change_case( async def test_update_area_with_name_already_in_use( area_registry: ar.AreaRegistry, + floor_registry: fr.FloorRegistry, ) -> None: """Make sure that we can't update an area with a name already in use.""" - area1 = area_registry.async_create("mock1") + floor = floor_registry.async_create("mock") + floor_id = floor.floor_id + area1 = area_registry.async_create("mock1", floor_id=floor_id) area2 = area_registry.async_create("mock2") with pytest.raises(ValueError) as e_info: @@ -255,6 +258,8 @@ async def test_update_area_with_name_already_in_use( assert area2.name == "mock2" assert len(area_registry.areas) == 2 + assert area_registry.areas.get_areas_for_floor(floor_id) == [area1] + async def test_update_area_with_normalized_name_already_in_use( area_registry: ar.AreaRegistry, diff --git a/tests/helpers/test_collection.py b/tests/helpers/test_collection.py index f0287218d7f..f564f85ec3b 100644 --- a/tests/helpers/test_collection.py +++ b/tests/helpers/test_collection.py @@ -2,8 +2,10 @@ 
from __future__ import annotations +from datetime import timedelta import logging +from freezegun.api import FrozenDateTimeFactory import pytest import voluptuous as vol @@ -15,6 +17,7 @@ from homeassistant.helpers import ( storage, ) from homeassistant.helpers.typing import ConfigType +from homeassistant.util.dt import utcnow from tests.common import flush_store from tests.typing import WebSocketGenerator @@ -254,6 +257,84 @@ async def test_storage_collection(hass: HomeAssistant) -> None: } +async def test_storage_collection_update_modifiet_at( + hass: HomeAssistant, + entity_registry: er.EntityRegistry, + freezer: FrozenDateTimeFactory, +) -> None: + """Test that updating a storage collection will update the modified_at datetime in the entity registry.""" + + entities: dict[str, TestEntity] = {} + + class TestEntity(MockEntity): + """Entity that is config based.""" + + def __init__(self, config: ConfigType) -> None: + """Initialize entity.""" + super().__init__(config) + self._state = "initial" + + @classmethod + def from_storage(cls, config: ConfigType) -> TestEntity: + """Create instance from storage.""" + obj = super().from_storage(config) + entities[obj.unique_id] = obj + return obj + + @property + def state(self) -> str: + """Return state of entity.""" + return self._state + + def set_state(self, value: str) -> None: + """Set value.""" + self._state = value + self.async_write_ha_state() + + store = storage.Store(hass, 1, "test-data") + data = {"id": "mock-1", "name": "Mock 1", "data": 1} + await store.async_save( + { + "items": [ + data, + ] + } + ) + id_manager = collection.IDManager() + ent_comp = entity_component.EntityComponent(_LOGGER, "test", hass) + await ent_comp.async_setup({}) + coll = MockStorageCollection(store, id_manager) + collection.sync_entity_lifecycle(hass, "test", "test", ent_comp, coll, TestEntity) + changes = track_changes(coll) + + await coll.async_load() + assert id_manager.has_id("mock-1") + assert len(changes) == 1 + assert changes[0] == (collection.CHANGE_ADDED, "mock-1", data) + + modified_1 = entity_registry.async_get("test.mock_1").modified_at + assert modified_1 == utcnow() + + freezer.tick(timedelta(minutes=1)) + + updated_item = await coll.async_update_item("mock-1", {"data": 2}) + assert id_manager.has_id("mock-1") + assert updated_item == {"id": "mock-1", "name": "Mock 1", "data": 2} + assert len(changes) == 2 + assert changes[1] == (collection.CHANGE_UPDATED, "mock-1", updated_item) + + modified_2 = entity_registry.async_get("test.mock_1").modified_at + assert modified_2 > modified_1 + assert modified_2 == utcnow() + + freezer.tick(timedelta(minutes=1)) + + entities["mock-1"].set_state("second") + + modified_3 = entity_registry.async_get("test.mock_1").modified_at + assert modified_3 == modified_2 + + async def test_attach_entity_component_collection(hass: HomeAssistant) -> None: """Test attaching collection to entity component.""" ent_comp = entity_component.EntityComponent(_LOGGER, "test", hass) diff --git a/tests/helpers/test_config_entry_oauth2_flow.py b/tests/helpers/test_config_entry_oauth2_flow.py index 23919f3a6a3..52def52f3f0 100644 --- a/tests/helpers/test_config_entry_oauth2_flow.py +++ b/tests/helpers/test_config_entry_oauth2_flow.py @@ -873,7 +873,9 @@ async def test_implementation_provider(hass: HomeAssistant, local_impl) -> None: provider_source = [] - async def async_provide_implementation(hass, domain): + async def async_provide_implementation( + hass: HomeAssistant, domain: str + ) -> 
list[config_entry_oauth2_flow.AbstractOAuth2Implementation]: """Mock implementation provider.""" return provider_source diff --git a/tests/helpers/test_config_validation.py b/tests/helpers/test_config_validation.py index cf72012a1f1..57c712e2f10 100644 --- a/tests/helpers/test_config_validation.py +++ b/tests/helpers/test_config_validation.py @@ -3,13 +3,16 @@ from collections import OrderedDict from datetime import date, datetime, timedelta import enum +from functools import partial import logging import os from socket import _GLOBAL_DEFAULT_TIMEOUT +import threading from typing import Any -from unittest.mock import Mock, patch +from unittest.mock import ANY, Mock, patch import uuid +import py import pytest import voluptuous as vol @@ -33,7 +36,7 @@ def test_boolean() -> None: "T", "negative", "lock", - "tr ue", + "tr ue", # codespell:ignore ue [], [1, 2], {"one": "two"}, @@ -1492,7 +1495,7 @@ def test_whitespace() -> None: "T", "negative", "lock", - "tr ue", + "tr ue", # codespell:ignore ue [], [1, 2], {"one": "two"}, @@ -1738,3 +1741,91 @@ def test_determine_script_action_ambiguous() -> None: def test_determine_script_action_non_ambiguous() -> None: """Test determine script action with a non ambiguous action.""" assert cv.determine_script_action({"delay": "00:00:05"}) == "delay" + + +async def test_async_validate(hass: HomeAssistant, tmpdir: py.path.local) -> None: + """Test the async_validate helper.""" + validator_calls: dict[str, list[int]] = {} + + def _mock_validator_schema(real_func, *args): + calls = validator_calls.setdefault(real_func.__name__, []) + calls.append(threading.get_ident()) + return real_func(*args) + + CV_PREFIX = "homeassistant.helpers.config_validation" + with ( + patch(f"{CV_PREFIX}.isdir", wraps=partial(_mock_validator_schema, cv.isdir)), + patch(f"{CV_PREFIX}.string", wraps=partial(_mock_validator_schema, cv.string)), + ): + # Assert validation in event loop when not decorated with not_async_friendly + await cv.async_validate(hass, cv.string, "abcd") + assert validator_calls == {"string": [hass.loop_thread_id]} + validator_calls = {} + + # Assert validation in executor when decorated with not_async_friendly + await cv.async_validate(hass, cv.isdir, tmpdir) + assert validator_calls == {"isdir": [hass.loop_thread_id, ANY]} + assert validator_calls["isdir"][1] != hass.loop_thread_id + validator_calls = {} + + # Assert validation in executor when decorated with not_async_friendly + await cv.async_validate(hass, vol.All(cv.isdir, cv.string), tmpdir) + assert validator_calls == {"isdir": [hass.loop_thread_id, ANY], "string": [ANY]} + assert validator_calls["isdir"][1] != hass.loop_thread_id + assert validator_calls["string"][0] != hass.loop_thread_id + validator_calls = {} + + # Assert validation in executor when decorated with not_async_friendly + await cv.async_validate(hass, vol.All(cv.string, cv.isdir), tmpdir) + assert validator_calls == { + "isdir": [hass.loop_thread_id, ANY], + "string": [hass.loop_thread_id, ANY], + } + assert validator_calls["isdir"][1] != hass.loop_thread_id + assert validator_calls["string"][1] != hass.loop_thread_id + validator_calls = {} + + # Assert validation in event loop when not using cv.async_validate + cv.isdir(tmpdir) + assert validator_calls == {"isdir": [hass.loop_thread_id]} + validator_calls = {} + + # Assert validation in event loop when not using cv.async_validate + vol.All(cv.isdir, cv.string)(tmpdir) + assert validator_calls == { + "isdir": [hass.loop_thread_id], + "string": [hass.loop_thread_id], + } + validator_calls = 
{} + + # Assert validation in event loop when not using cv.async_validate + vol.All(cv.string, cv.isdir)(tmpdir) + assert validator_calls == { + "isdir": [hass.loop_thread_id], + "string": [hass.loop_thread_id], + } + validator_calls = {} + + +async def test_is_entity_service_schema( + hass: HomeAssistant, +) -> None: + """Test cv.is_entity_service_schema.""" + for schema in ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), + vol.Any(cv.make_entity_service_schema({"some": str})), + ): + assert cv.is_entity_service_schema(schema) is False + + for schema in ( + cv.make_entity_service_schema({"some": str}), + vol.Schema(cv.make_entity_service_schema({"some": str})), + vol.Schema(vol.All(cv.make_entity_service_schema({"some": str}))), + vol.Schema(vol.Schema(cv.make_entity_service_schema({"some": str}))), + vol.All(cv.make_entity_service_schema({"some": str})), + vol.All(vol.All(cv.make_entity_service_schema({"some": str}))), + vol.All(vol.Schema(cv.make_entity_service_schema({"some": str}))), + ): + assert cv.is_entity_service_schema(schema) is True diff --git a/tests/helpers/test_discovery.py b/tests/helpers/test_discovery.py index 100b50e2749..a66ac7474e3 100644 --- a/tests/helpers/test_discovery.py +++ b/tests/helpers/test_discovery.py @@ -9,6 +9,8 @@ from homeassistant.const import Platform from homeassistant.core import HomeAssistant, callback from homeassistant.helpers import discovery from homeassistant.helpers.dispatcher import async_dispatcher_send +from homeassistant.helpers.entity_platform import AddEntitiesCallback +from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType from tests.common import MockModule, MockPlatform, mock_integration, mock_platform @@ -115,7 +117,7 @@ async def test_circular_import(hass: HomeAssistant) -> None: component_calls = [] platform_calls = [] - def component_setup(hass, config): + def component_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" discovery.load_platform( hass, Platform.SWITCH, "test_circular", {"key": "value"}, config @@ -123,7 +125,12 @@ async def test_circular_import(hass: HomeAssistant) -> None: component_calls.append(1) return True - def setup_platform(hass, config, add_entities_callback, discovery_info=None): + def setup_platform( + hass: HomeAssistant, + config: ConfigType, + add_entities_callback: AddEntitiesCallback, + discovery_info: DiscoveryInfoType | None = None, + ) -> None: """Set up mock platform.""" platform_calls.append("disc" if discovery_info else "component") @@ -162,14 +169,14 @@ async def test_1st_discovers_2nd_component(hass: HomeAssistant) -> None: """ component_calls = [] - async def component1_setup(hass, config): + async def component1_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" await discovery.async_discover( hass, "test_component2", {}, "test_component2", {} ) return True - def component2_setup(hass, config): + def component2_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" component_calls.append(1) return True diff --git a/tests/helpers/test_dispatcher.py b/tests/helpers/test_dispatcher.py index c2c8663f47c..0350b2e6e3a 100644 --- a/tests/helpers/test_dispatcher.py +++ b/tests/helpers/test_dispatcher.py @@ -188,8 +188,7 @@ async def test_callback_exception_gets_logged( @callback def bad_handler(*args): """Record calls.""" - # pylint: disable-next=broad-exception-raised - raise Exception("This is a bad message callback") + raise 
Exception("This is a bad message callback") # noqa: TRY002 # wrap in partial to test message logging. async_dispatcher_connect(hass, "test", partial(bad_handler)) @@ -209,8 +208,7 @@ async def test_coro_exception_gets_logged( async def bad_async_handler(*args): """Record calls.""" - # pylint: disable-next=broad-exception-raised - raise Exception("This is a bad message in a coro") + raise Exception("This is a bad message in a coro") # noqa: TRY002 # wrap in partial to test message logging. async_dispatcher_connect(hass, "test", bad_async_handler) diff --git a/tests/helpers/test_entity.py b/tests/helpers/test_entity.py index 283a5b4fb37..58554059fb4 100644 --- a/tests/helpers/test_entity.py +++ b/tests/helpers/test_entity.py @@ -16,6 +16,7 @@ import pytest from syrupy.assertion import SnapshotAssertion import voluptuous as vol +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( ATTR_ATTRIBUTION, ATTR_DEVICE_CLASS, @@ -34,6 +35,7 @@ from homeassistant.core import ( from homeassistant.exceptions import HomeAssistantError from homeassistant.helpers import device_registry as dr, entity, entity_registry as er from homeassistant.helpers.entity_component import async_update_entity +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import UNDEFINED, UndefinedType from tests.common import ( @@ -981,10 +983,13 @@ async def _test_friendly_name( ) -> None: """Test friendly name.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([ent]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1306,10 +1311,13 @@ async def test_entity_name_translation_placeholder_errors( """Return all backend translations.""" return translations[language] - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([ent]) - return True ent = MockEntity( unique_id="qwer", @@ -1531,7 +1539,11 @@ async def test_friendly_name_updated( ) -> None: """Test friendly name is updated when device or entity registry updates.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1547,7 +1559,6 @@ async def test_friendly_name_updated( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") diff --git a/tests/helpers/test_entity_component.py b/tests/helpers/test_entity_component.py index 3f34305b39d..9723b91eb9a 100644 --- a/tests/helpers/test_entity_component.py +++ b/tests/helpers/test_entity_component.py @@ -23,7 +23,7 @@ from homeassistant.core import ( callback, ) from homeassistant.exceptions import HomeAssistantError, PlatformNotReady -from homeassistant.helpers import discovery +from homeassistant.helpers import config_validation as cv, discovery from homeassistant.helpers.entity_component import EntityComponent, async_update_entity from 
homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType @@ -495,7 +495,19 @@ async def test_extract_all_use_match_all( ) not in caplog.text -async def test_register_entity_service(hass: HomeAssistant) -> None: +@pytest.mark.parametrize( + ("schema", "service_data"), + [ + ({"some": str}, {"some": "data"}), + ({}, {}), + (None, {}), + ], +) +async def test_register_entity_service( + hass: HomeAssistant, + schema: dict | None, + service_data: dict, +) -> None: """Test registering an enttiy service and calling it.""" entity = MockEntity(entity_id=f"{DOMAIN}.entity") calls = [] @@ -510,9 +522,7 @@ async def test_register_entity_service(hass: HomeAssistant) -> None: await component.async_setup({}) await component.async_add_entities([entity]) - component.async_register_entity_service( - "hello", {"some": str}, "async_called_by_service" - ) + component.async_register_entity_service("hello", schema, "async_called_by_service") with pytest.raises(vol.Invalid): await hass.services.async_call( @@ -524,28 +534,57 @@ async def test_register_entity_service(hass: HomeAssistant) -> None: assert len(calls) == 0 await hass.services.async_call( - DOMAIN, "hello", {"entity_id": entity.entity_id, "some": "data"}, blocking=True + DOMAIN, "hello", {"entity_id": entity.entity_id} | service_data, blocking=True ) assert len(calls) == 1 - assert calls[0] == {"some": "data"} + assert calls[0] == service_data await hass.services.async_call( - DOMAIN, "hello", {"entity_id": ENTITY_MATCH_ALL, "some": "data"}, blocking=True + DOMAIN, "hello", {"entity_id": ENTITY_MATCH_ALL} | service_data, blocking=True ) assert len(calls) == 2 - assert calls[1] == {"some": "data"} + assert calls[1] == service_data await hass.services.async_call( - DOMAIN, "hello", {"entity_id": ENTITY_MATCH_NONE, "some": "data"}, blocking=True + DOMAIN, "hello", {"entity_id": ENTITY_MATCH_NONE} | service_data, blocking=True ) assert len(calls) == 2 await hass.services.async_call( - DOMAIN, "hello", {"area_id": ENTITY_MATCH_NONE, "some": "data"}, blocking=True + DOMAIN, "hello", {"area_id": ENTITY_MATCH_NONE} | service_data, blocking=True ) assert len(calls) == 2 +async def test_register_entity_service_non_entity_service_schema( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test attempting to register a service with a non entity service schema.""" + component = EntityComponent(_LOGGER, DOMAIN, hass) + expected_message = "registers an entity service with a non entity service schema" + + for idx, schema in enumerate( + ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), + ) + ): + component.async_register_entity_service(f"hello_{idx}", schema, Mock()) + assert expected_message in caplog.text + caplog.clear() + + for idx, schema in enumerate( + ( + cv.make_entity_service_schema({"some": str}), + vol.Schema(cv.make_entity_service_schema({"some": str})), + vol.All(cv.make_entity_service_schema({"some": str})), + ) + ): + component.async_register_entity_service(f"test_service_{idx}", schema, Mock()) + assert expected_message not in caplog.text + + async def test_register_entity_service_response_data(hass: HomeAssistant) -> None: """Test an entity service that does support response data.""" entity = MockEntity(entity_id=f"{DOMAIN}.entity") diff --git a/tests/helpers/test_entity_platform.py b/tests/helpers/test_entity_platform.py index 75a41945a91..db83819085b 100644 --- 
a/tests/helpers/test_entity_platform.py +++ b/tests/helpers/test_entity_platform.py @@ -8,7 +8,9 @@ from typing import Any from unittest.mock import ANY, AsyncMock, Mock, patch import pytest +import voluptuous as vol +from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_HOMEASSISTANT_STARTED, PERCENTAGE, EntityCategory from homeassistant.core import ( CoreState, @@ -21,6 +23,7 @@ from homeassistant.core import ( from homeassistant.exceptions import HomeAssistantError, PlatformNotReady from homeassistant.helpers import ( area_registry as ar, + config_validation as cv, device_registry as dr, entity_platform, entity_registry as er, @@ -32,6 +35,7 @@ from homeassistant.helpers.entity_component import ( DEFAULT_SCAN_INTERVAL, EntityComponent, ) +from homeassistant.helpers.entity_platform import AddEntitiesCallback from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType import homeassistant.util.dt as dt_util @@ -854,10 +858,13 @@ async def test_setup_entry( ) -> None: """Test we can setup an entry.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([MockEntity(name="test1", unique_id="unique")]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1137,7 +1144,11 @@ async def test_device_info_called( model="via", ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1162,7 +1173,6 @@ async def test_device_info_called( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1206,7 +1216,11 @@ async def test_device_info_not_overrides( assert device.manufacturer == "test-manufacturer" assert device.model == "test-model" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1221,7 +1235,6 @@ async def test_device_info_not_overrides( ) ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1256,7 +1269,11 @@ async def test_device_info_homeassistant_url( model="via", ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1270,7 +1287,6 @@ async def test_device_info_homeassistant_url( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1305,7 +1321,11 @@ async def test_device_info_change_to_no_url( configuration_url="homeassistant://config/mqtt", ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1319,7 +1339,6 @@ 
async def test_device_info_change_to_no_url( ), ] ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) entity_platform = MockEntityPlatform( @@ -1374,10 +1393,13 @@ async def test_entity_disabled_by_device( unique_id="disabled", device_info=DeviceInfo(connections=connections) ) - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([entity_disabled]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id", domain=DOMAIN) @@ -1760,6 +1782,67 @@ async def test_register_entity_service_limited_to_matching_platforms( } +async def test_register_entity_service_none_schema( + hass: HomeAssistant, +) -> None: + """Test registering a service with schema set to None.""" + entity_platform = MockEntityPlatform( + hass, domain="mock_integration", platform_name="mock_platform", platform=None + ) + entity1 = SlowEntity(name="entity_1") + entity2 = SlowEntity(name="entity_1") + await entity_platform.async_add_entities([entity1, entity2]) + + entities = [] + + @callback + def handle_service(entity, *_): + entities.append(entity) + + entity_platform.async_register_entity_service("hello", None, handle_service) + + await hass.services.async_call( + "mock_platform", "hello", {"entity_id": "all"}, blocking=True + ) + + assert len(entities) == 2 + assert entity1 in entities + assert entity2 in entities + + +async def test_register_entity_service_non_entity_service_schema( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test attempting to register a service with a non entity service schema.""" + entity_platform = MockEntityPlatform( + hass, domain="mock_integration", platform_name="mock_platform", platform=None + ) + expected_message = "registers an entity service with a non entity service schema" + + for idx, schema in enumerate( + ( + vol.Schema({"some": str}), + vol.All(vol.Schema({"some": str})), + vol.Any(vol.Schema({"some": str})), + ) + ): + entity_platform.async_register_entity_service(f"hello_{idx}", schema, Mock()) + assert expected_message in caplog.text + caplog.clear() + + for idx, schema in enumerate( + ( + cv.make_entity_service_schema({"some": str}), + vol.Schema(cv.make_entity_service_schema({"some": str})), + vol.All(cv.make_entity_service_schema({"some": str})), + ) + ): + entity_platform.async_register_entity_service( + f"test_service_{idx}", schema, Mock() + ) + assert expected_message not in caplog.text + + @pytest.mark.parametrize("update_before_add", [True, False]) async def test_invalid_entity_id( hass: HomeAssistant, caplog: pytest.LogCaptureFixture, update_before_add: bool @@ -1796,13 +1879,16 @@ async def test_setup_entry_with_entities_that_block_forever( ) -> None: """Test we cancel adding entities when we reach the timeout.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [MockBlockingEntity(name="test1", unique_id="unique")], update_before_add=update_before_add, ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1841,13 +1927,16 @@ async def 
test_cancellation_is_not_blocked( ) -> None: """Test cancellation is not blocked while adding entities.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [MockCancellingEntity(name="test1", unique_id="unique")], update_before_add=update_before_add, ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -1935,7 +2024,11 @@ async def test_entity_name_influences_entity_id( ) -> None: """Test entity_id is influenced by entity name.""" - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [ @@ -1952,7 +2045,6 @@ async def test_entity_name_influences_entity_id( ], update_before_add=update_before_add, ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2020,12 +2112,15 @@ async def test_translated_entity_name_influences_entity_id( """Return all backend translations.""" return translations[language] - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities( [TranslatedEntity(has_entity_name)], update_before_add=update_before_add ) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2105,10 +2200,13 @@ async def test_translated_device_class_name_influences_entity_id( """Return all backend translations.""" return translations[language] - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([TranslatedDeviceClassEntity(device_class, has_entity_name)]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(entry_id="super-mock-id") @@ -2164,10 +2262,13 @@ async def test_device_name_defaulting_config_entry( _attr_unique_id = "qwer" _attr_device_info = device_info - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([DeviceNameEntity()]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = MockConfigEntry(title=config_entry_title, entry_id="super-mock-id") @@ -2217,10 +2318,13 @@ async def test_device_type_error_checking( _attr_unique_id = "qwer" _attr_device_info = device_info - async def async_setup_entry(hass, config_entry, async_add_entities): + async def async_setup_entry( + hass: HomeAssistant, + config_entry: ConfigEntry, + async_add_entities: AddEntitiesCallback, + ) -> None: """Mock setup entry method.""" async_add_entities([DeviceNameEntity()]) - return True platform = MockPlatform(async_setup_entry=async_setup_entry) config_entry = 
MockConfigEntry( diff --git a/tests/helpers/test_event.py b/tests/helpers/test_event.py index 4bb4c1a1967..19f1ef5bb76 100644 --- a/tests/helpers/test_event.py +++ b/tests/helpers/test_event.py @@ -1476,7 +1476,7 @@ async def test_track_template_result_super_template_2( wildercard_runs = [] wildercard_runs_availability = [] - template_availability = Template(availability_template) + template_availability = Template(availability_template, hass) template_condition = Template("{{states.sensor.test.state}}", hass) template_condition_var = Template( "{{(states.sensor.test.state|int) + test }}", hass @@ -1628,7 +1628,7 @@ async def test_track_template_result_super_template_2_initially_false( wildercard_runs = [] wildercard_runs_availability = [] - template_availability = Template(availability_template) + template_availability = Template(availability_template, hass) template_condition = Template("{{states.sensor.test.state}}", hass) template_condition_var = Template( "{{(states.sensor.test.state|int) + test }}", hass @@ -3124,11 +3124,11 @@ async def test_async_track_template_result_multiple_templates( ) -> None: """Test tracking multiple templates.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}") - template_2 = Template("{{ states.switch.test.state == 'on' }}") - template_3 = Template("{{ states.switch.test.state == 'off' }}") + template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_2 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_3 = Template("{{ states.switch.test.state == 'off' }}", hass) template_4 = Template( - "{{ states.binary_sensor | map(attribute='entity_id') | list }}" + "{{ states.binary_sensor | map(attribute='entity_id') | list }}", hass ) refresh_runs = [] @@ -3188,11 +3188,12 @@ async def test_async_track_template_result_multiple_templates_mixing_domain( ) -> None: """Test tracking multiple templates when tracking entities and an entire domain.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}") - template_2 = Template("{{ states.switch.test.state == 'on' }}") - template_3 = Template("{{ states.switch.test.state == 'off' }}") + template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_2 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_3 = Template("{{ states.switch.test.state == 'off' }}", hass) template_4 = Template( - "{{ states.switch | sort(attribute='entity_id') | map(attribute='entity_id') | list }}" + "{{ states.switch | sort(attribute='entity_id') | map(attribute='entity_id') | list }}", + hass, ) refresh_runs = [] @@ -3417,8 +3418,8 @@ async def test_async_track_template_result_multiple_templates_mixing_listeners( ) -> None: """Test tracking multiple templates with mixing listener types.""" - template_1 = Template("{{ states.switch.test.state == 'on' }}") - template_2 = Template("{{ now() and True }}") + template_1 = Template("{{ states.switch.test.state == 'on' }}", hass) + template_2 = Template("{{ now() and True }}", hass) refresh_runs = [] @@ -4937,3 +4938,43 @@ async def test_async_track_state_report_event(hass: HomeAssistant) -> None: await hass.async_block_till_done() assert len(tracker_called) == 2 unsub() + + +async def test_async_track_template_no_hass_deprecated( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test async_track_template with a template without hass is deprecated.""" + message = ( + "Detected code that calls async_track_template_result with template without " + "hass, which will stop working 
in HA Core 2025.10. Please report this issue." + ) + + async_track_template(hass, Template("blah"), lambda x, y, z: None) + assert message in caplog.text + caplog.clear() + + async_track_template(hass, Template("blah", hass), lambda x, y, z: None) + assert message not in caplog.text + caplog.clear() + + +async def test_async_track_template_result_no_hass_deprecated( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test async_track_template_result with a template without hass is deprecated.""" + message = ( + "Detected code that calls async_track_template_result with template without " + "hass, which will stop working in HA Core 2025.10. Please report this issue." + ) + + async_track_template_result( + hass, [TrackTemplate(Template("blah"), None)], lambda x, y, z: None + ) + assert message in caplog.text + caplog.clear() + + async_track_template_result( + hass, [TrackTemplate(Template("blah", hass), None)], lambda x, y, z: None + ) + assert message not in caplog.text + caplog.clear() diff --git a/tests/helpers/test_icon.py b/tests/helpers/test_icon.py index 732f9971ac0..e0dc89f5322 100644 --- a/tests/helpers/test_icon.py +++ b/tests/helpers/test_icon.py @@ -101,7 +101,7 @@ async def test_get_icons(hass: HomeAssistant) -> None: # Test services icons are available icons = await icon.async_get_icons(hass, "services") assert len(icons) == 1 - assert icons["switch"]["turn_off"] == "mdi:toggle-switch-variant-off" + assert icons["switch"]["turn_off"] == {"service": "mdi:toggle-switch-variant-off"} # Ensure icons file for platform isn't loaded, as that isn't supported icons = await icon.async_get_icons(hass, "entity") @@ -126,7 +126,7 @@ async def test_get_icons(hass: HomeAssistant) -> None: icons = await icon.async_get_icons(hass, "services") assert len(icons) == 2 - assert icons["test_package"]["enable_god_mode"] == "mdi:shield" + assert icons["test_package"]["enable_god_mode"] == {"service": "mdi:shield"} # Load another one hass.config.components.add("test_embedded") diff --git a/tests/helpers/test_integration_platform.py b/tests/helpers/test_integration_platform.py index 497bae5fb88..93bfeb2da5b 100644 --- a/tests/helpers/test_integration_platform.py +++ b/tests/helpers/test_integration_platform.py @@ -2,6 +2,7 @@ from collections.abc import Callable from types import ModuleType +from typing import Any from unittest.mock import Mock, patch import pytest @@ -29,7 +30,9 @@ async def test_process_integration_platforms_with_wait(hass: HomeAssistant) -> N processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -67,7 +70,9 @@ async def test_process_integration_platforms(hass: HomeAssistant) -> None: processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -107,7 +112,9 @@ async def test_process_integration_platforms_import_fails( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -150,7 +157,9 @@ async def test_process_integration_platforms_import_fails_after_registered( processed = [] - async def _process_platform(hass, domain, platform): + async def 
_process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -242,7 +251,9 @@ async def test_broken_integration( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) @@ -265,7 +276,9 @@ async def test_process_integration_platforms_no_integrations( processed = [] - async def _process_platform(hass, domain, platform): + async def _process_platform( + hass: HomeAssistant, domain: str, platform: Any + ) -> None: """Process platform.""" processed.append((domain, platform)) diff --git a/tests/helpers/test_significant_change.py b/tests/helpers/test_significant_change.py index f9dca5b6034..577ea5907e5 100644 --- a/tests/helpers/test_significant_change.py +++ b/tests/helpers/test_significant_change.py @@ -1,5 +1,8 @@ """Test significant change helper.""" +from types import MappingProxyType +from typing import Any + import pytest from homeassistant.components.sensor import SensorDeviceClass @@ -67,8 +70,14 @@ async def test_significant_change_extra( assert checker.async_is_significant_change(State(ent_id, "100", attrs), extra_arg=1) def extra_significant_check( - hass, old_state, old_attrs, old_extra_arg, new_state, new_attrs, new_extra_arg - ): + hass: HomeAssistant, + old_state: str, + old_attrs: dict | MappingProxyType, + old_extra_arg: Any, + new_state: str, + new_attrs: dict | MappingProxyType, + new_extra_arg: Any, + ) -> bool | None: return old_extra_arg != new_extra_arg checker.extra_significant_check = extra_significant_check diff --git a/tests/helpers/test_singleton.py b/tests/helpers/test_singleton.py index dcda1e2db3a..4722c58dc9f 100644 --- a/tests/helpers/test_singleton.py +++ b/tests/helpers/test_singleton.py @@ -1,9 +1,11 @@ """Test singleton helper.""" +from typing import Any from unittest.mock import Mock import pytest +from homeassistant.core import HomeAssistant from homeassistant.helpers import singleton @@ -14,11 +16,11 @@ def mock_hass(): @pytest.mark.parametrize("result", [object(), {}, []]) -async def test_singleton_async(mock_hass, result) -> None: +async def test_singleton_async(mock_hass: HomeAssistant, result: Any) -> None: """Test singleton with async function.""" @singleton.singleton("test_key") - async def something(hass): + async def something(hass: HomeAssistant) -> Any: return result result1 = await something(mock_hass) @@ -30,11 +32,11 @@ async def test_singleton_async(mock_hass, result) -> None: @pytest.mark.parametrize("result", [object(), {}, []]) -def test_singleton(mock_hass, result) -> None: +def test_singleton(mock_hass: HomeAssistant, result: Any) -> None: """Test singleton with function.""" @singleton.singleton("test_key") - def something(hass): + def something(hass: HomeAssistant) -> Any: return result result1 = something(mock_hass) diff --git a/tests/helpers/test_start.py b/tests/helpers/test_start.py index d9c6bbf441c..bd6b328a2c7 100644 --- a/tests/helpers/test_start.py +++ b/tests/helpers/test_start.py @@ -14,7 +14,7 @@ async def test_at_start_when_running_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -40,7 +40,7 @@ async def test_at_start_when_running_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: 
"""Home Assistant is started.""" calls.append(1) @@ -65,7 +65,7 @@ async def test_at_start_when_starting_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -88,7 +88,7 @@ async def test_at_start_when_starting_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -116,7 +116,7 @@ async def test_cancelling_at_start_when_running( calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -137,7 +137,7 @@ async def test_cancelling_at_start_when_starting(hass: HomeAssistant) -> None: calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -156,7 +156,7 @@ async def test_at_started_when_running_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -181,7 +181,7 @@ async def test_at_started_when_running_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -205,7 +205,7 @@ async def test_at_started_when_starting_awaitable(hass: HomeAssistant) -> None: calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -231,7 +231,7 @@ async def test_at_started_when_starting_callback( calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -263,7 +263,7 @@ async def test_cancelling_at_started_when_running( calls = [] - async def cb_at_start(hass): + async def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) @@ -284,7 +284,7 @@ async def test_cancelling_at_started_when_starting(hass: HomeAssistant) -> None: calls = [] @callback - def cb_at_start(hass): + def cb_at_start(hass: HomeAssistant) -> None: """Home Assistant is started.""" calls.append(1) diff --git a/tests/helpers/test_template.py b/tests/helpers/test_template.py index 3123c01f500..370e752e950 100644 --- a/tests/helpers/test_template.py +++ b/tests/helpers/test_template.py @@ -6236,3 +6236,65 @@ async def test_template_thread_safety_checks(hass: HomeAssistant) -> None: await hass.async_add_executor_job(template_obj.async_render_to_info) assert template_obj.async_render_to_info().result() == 23 + + +@pytest.mark.parametrize( + ("cola", "colb", "expected"), + [ + ([1, 2], [3, 4], [(1, 3), (2, 4)]), + ([1, 2], [3, 4, 5], [(1, 3), (2, 4)]), + ([1, 2, 3, 4], [3, 4], [(1, 3), (2, 4)]), + ], +) +def test_zip(hass: HomeAssistant, cola, colb, expected) -> None: + """Test zip.""" + assert ( + template.Template("{{ zip(cola, colb) | list }}", hass).async_render( + {"cola": cola, "colb": colb} + ) + == expected + ) + assert ( + template.Template( + "[{% for a, b in zip(cola, colb) %}({{a}}, {{b}}), {% endfor %}]", hass + ).async_render({"cola": cola, "colb": colb}) + == expected + ) + + +@pytest.mark.parametrize( + ("col", "expected"), + [ + ([(1, 3), (2, 4)], [(1, 2), (3, 4)]), + (["ax", "by", "cz"], [("a", "b", "c"), ("x", "y", "z")]), + ], +) +def test_unzip(hass: HomeAssistant, col, expected) -> 
None: + """Test unzipping using zip.""" + assert ( + template.Template("{{ zip(*col) | list }}", hass).async_render({"col": col}) + == expected + ) + assert ( + template.Template( + "{% set a, b = zip(*col) %}[{{a}}, {{b}}]", hass + ).async_render({"col": col}) + == expected + ) + + +def test_warn_no_hass(hass: HomeAssistant, caplog: pytest.LogCaptureFixture) -> None: + """Test deprecation warning when instantiating Template without hass.""" + + message = "Detected code that creates a template object without passing hass" + template.Template("blah") + assert message in caplog.text + caplog.clear() + + template.Template("blah", None) + assert message in caplog.text + caplog.clear() + + template.Template("blah", hass) + assert message not in caplog.text + caplog.clear() diff --git a/tests/scripts/test_auth.py b/tests/scripts/test_auth.py index 002807f08a5..e52a2cc6567 100644 --- a/tests/scripts/test_auth.py +++ b/tests/scripts/test_auth.py @@ -1,5 +1,6 @@ """Test the auth script to manage local users.""" +import argparse from asyncio import AbstractEventLoop from collections.abc import Generator import logging @@ -148,7 +149,9 @@ def test_parsing_args(event_loop: AbstractEventLoop) -> None: """Test we parse args correctly.""" called = False - async def mock_func(hass, provider, args2): + async def mock_func( + hass: HomeAssistant, provider: hass_auth.AuthProvider, args2: argparse.Namespace + ) -> None: """Mock function to be called.""" nonlocal called called = True diff --git a/tests/test_block_async_io.py b/tests/test_block_async_io.py index ef4f9df60f6..dc2b096f595 100644 --- a/tests/test_block_async_io.py +++ b/tests/test_block_async_io.py @@ -5,6 +5,7 @@ import glob import importlib import os from pathlib import Path, PurePosixPath +import ssl import time from typing import Any from unittest.mock import Mock, patch @@ -217,6 +218,17 @@ async def test_protect_loop_open(caplog: pytest.LogCaptureFixture) -> None: assert "Detected blocking call to open with args" not in caplog.text +async def test_protect_loop_path_open(caplog: pytest.LogCaptureFixture) -> None: + """Test opening a file in /proc is not reported.""" + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/proc/does_not_exist").open(encoding="utf8"), # noqa: ASYNC230 + ): + pass + assert "Detected blocking call to open with args" not in caplog.text + + async def test_protect_open(caplog: pytest.LogCaptureFixture) -> None: """Test opening a file in the event loop logs.""" with patch.object(block_async_io, "_IN_TESTS", False): @@ -230,6 +242,71 @@ async def test_protect_open(caplog: pytest.LogCaptureFixture) -> None: assert "Detected blocking call to open with args" in caplog.text +async def test_protect_path_open(caplog: pytest.LogCaptureFixture) -> None: + """Test opening a file in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data_not_exist").open(encoding="utf8"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to open with args" in caplog.text + + +async def test_protect_path_read_bytes(caplog: pytest.LogCaptureFixture) -> None: + """Test reading file bytes in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data_not_exist").read_bytes(), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to read_bytes with args" in 
caplog.text + + +async def test_protect_path_read_text(caplog: pytest.LogCaptureFixture) -> None: + """Test reading a file text in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data_not_exist").read_text(encoding="utf8"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to read_text with args" in caplog.text + + +async def test_protect_path_write_bytes(caplog: pytest.LogCaptureFixture) -> None: + """Test writing file bytes in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data/not/exist").write_bytes(b"xxx"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to write_bytes with args" in caplog.text + + +async def test_protect_path_write_text(caplog: pytest.LogCaptureFixture) -> None: + """Test writing file text in the event loop logs.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + with ( + contextlib.suppress(FileNotFoundError), + Path("/config/data/not/exist").write_text("xxx", encoding="utf8"), # noqa: ASYNC230 + ): + pass + + assert "Detected blocking call to write_text with args" in caplog.text + + async def test_enable_multiple_times(caplog: pytest.LogCaptureFixture) -> None: """Test trying to enable multiple times.""" with patch.object(block_async_io, "_IN_TESTS", False): @@ -330,6 +407,41 @@ async def test_protect_loop_walk( assert "Detected blocking call to walk with args" not in caplog.text +async def test_protect_loop_load_default_certs( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.load_default_certs calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + assert "Detected blocking call to load_default_certs" in caplog.text + assert context + + +async def test_protect_loop_load_verify_locations( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.load_verify_locations calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + with pytest.raises(OSError): + context.load_verify_locations("/dev/null") + assert "Detected blocking call to load_verify_locations" in caplog.text + + +async def test_protect_loop_load_cert_chain( + hass: HomeAssistant, caplog: pytest.LogCaptureFixture +) -> None: + """Test SSLContext.load_cert_chain calls in the loop are logged.""" + with patch.object(block_async_io, "_IN_TESTS", False): + block_async_io.enable() + context = ssl.create_default_context() + with pytest.raises(OSError): + context.load_cert_chain("/dev/null") + assert "Detected blocking call to load_cert_chain" in caplog.text + + async def test_open_calls_ignored_in_tests(caplog: pytest.LogCaptureFixture) -> None: """Test opening a file in tests is ignored.""" assert block_async_io._IN_TESTS diff --git a/tests/test_bootstrap.py b/tests/test_bootstrap.py index 278bfc631fd..a32d7d1e50b 100644 --- a/tests/test_bootstrap.py +++ b/tests/test_bootstrap.py @@ -213,7 +213,7 @@ async def test_setup_after_deps_all_present(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: 
order.append(domain) return True @@ -260,7 +260,7 @@ async def test_setup_after_deps_in_stage_1_ignored(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -315,7 +315,7 @@ async def test_setup_after_deps_manifests_are_loaded_even_if_not_setup( order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -392,7 +392,7 @@ async def test_setup_frontend_before_recorder(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -471,7 +471,7 @@ async def test_setup_after_deps_via_platform(hass: HomeAssistant) -> None: after_dep_event = asyncio.Event() def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: if domain == "after_dep_of_platform_int": await after_dep_event.wait() @@ -520,7 +520,7 @@ async def test_setup_after_deps_not_trigger_load(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -559,7 +559,7 @@ async def test_setup_after_deps_not_present(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True @@ -969,7 +969,7 @@ async def test_empty_integrations_list_is_only_sent_at_the_end_of_bootstrap( order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) await asyncio.sleep(0.05) @@ -1029,7 +1029,7 @@ async def test_warning_logged_on_wrap_up_timeout( task: asyncio.Task | None = None def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: nonlocal task async def _not_marked_background_task(): @@ -1067,7 +1067,7 @@ async def test_tasks_logged_that_block_stage_1( """Test we log tasks that delay stage 1 startup.""" def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def _not_marked_background_task(): await asyncio.sleep(0.2) @@ -1110,7 +1110,7 @@ async def test_tasks_logged_that_block_stage_2( done_future = hass.loop.create_future() def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: async def _not_marked_background_task(): await done_future @@ -1452,7 +1452,7 @@ async def test_setup_does_base_platforms_first(hass: HomeAssistant) -> None: order = [] def gen_domain_setup(domain): - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: order.append(domain) return True diff --git a/tests/test_config.py b/tests/test_config.py index c7039cabe8b..02f8e1fc078 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -36,6 +36,7 @@ from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, 
ConfigSource, HomeAssistant, + State, ) from homeassistant.exceptions import ConfigValidationError, HomeAssistantError from homeassistant.helpers import ( @@ -579,7 +580,7 @@ def test_customize_glob_is_ordered() -> None: assert isinstance(conf["customize_glob"], OrderedDict) -async def _compute_state(hass, config): +async def _compute_state(hass: HomeAssistant, config: dict[str, Any]) -> State | None: await config_util.async_process_ha_core_config(hass, config) entity = Entity() diff --git a/tests/test_config_entries.py b/tests/test_config_entries.py index 2a5dff5c14a..3042ccb28d9 100644 --- a/tests/test_config_entries.py +++ b/tests/test_config_entries.py @@ -18,6 +18,7 @@ from syrupy.assertion import SnapshotAssertion from homeassistant import config_entries, data_entry_flow, loader from homeassistant.components import dhcp from homeassistant.components.hassio import HassioServiceInfo +from homeassistant.config_entries import ConfigEntry from homeassistant.const import ( EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_STARTED, @@ -104,12 +105,12 @@ async def test_setup_race_only_setup_once(hass: HomeAssistant) -> None: fast_config_entry_setup_future = hass.loop.create_future() slow_setup_future = hass.loop.create_future() - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" await slow_setup_future return True - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry.""" slow = entry.data["slow"] if slow: @@ -122,7 +123,7 @@ async def test_setup_race_only_setup_once(hass: HomeAssistant) -> None: await fast_config_entry_setup_future return True - async def async_unload_entry(hass, entry): + async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock unload entry.""" return True @@ -582,9 +583,9 @@ async def test_remove_entry_raises( ) -> None: """Test if a component raises while removing entry.""" - async def mock_unload_entry(hass, entry): + async def mock_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock unload entry function.""" - raise Exception("BROKEN") # pylint: disable=broad-exception-raised + raise Exception("BROKEN") # noqa: TRY002 mock_integration(hass, MockModule("comp", async_unload_entry=mock_unload_entry)) @@ -1326,7 +1327,7 @@ async def test_update_entry_options_and_trigger_listener( entry.add_to_manager(manager) update_listener_calls = [] - async def update_listener(hass, entry): + async def update_listener(hass: HomeAssistant, entry: ConfigEntry) -> None: """Test function.""" assert entry.options == {"second": True} update_listener_calls.append(None) @@ -1491,7 +1492,7 @@ async def test_reload_during_setup_retrying_waits(hass: HomeAssistant) -> None: load_attempts = [] sleep_duration = 0 - async def _mock_setup_entry(hass, entry): + async def _mock_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry.""" nonlocal sleep_duration await asyncio.sleep(sleep_duration) @@ -1536,7 +1537,7 @@ async def test_create_entry_options( ) -> None: """Test a config entry being created with options.""" - async def mock_async_setup(hass, config): + async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" hass.async_create_task( hass.config_entries.flow.async_init( @@ -3234,7 +3235,7 @@ async def test_async_setup_init_entry_completes_before_loaded_event_fires( """Test a config entry being initialized during integration 
setup before the loaded event fires.""" load_events = async_capture_events(hass, EVENT_COMPONENT_LOADED) - async def mock_async_setup(hass, config): + async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" hass.async_create_task( hass.config_entries.flow.async_init( @@ -3292,7 +3293,7 @@ async def test_async_setup_update_entry(hass: HomeAssistant) -> None: entry = MockConfigEntry(domain="comp", data={"value": "initial"}) entry.add_to_hass(hass) - async def mock_async_setup(hass, config): + async def mock_async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Mock setup.""" hass.async_create_task( hass.config_entries.flow.async_init( @@ -3303,7 +3304,7 @@ async def test_async_setup_update_entry(hass: HomeAssistant) -> None: ) return True - async def mock_async_setup_entry(hass, entry): + async def mock_async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setting up an entry.""" assert entry.data["value"] == "updated" return True @@ -3791,7 +3792,7 @@ async def test_setup_raise_entry_error_from_first_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -3831,7 +3832,7 @@ async def test_setup_not_raise_entry_error_from_future_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -3910,7 +3911,7 @@ async def test_setup_raise_auth_failed_from_first_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -3962,7 +3963,7 @@ async def test_setup_raise_auth_failed_from_future_coordinator_update( entry = MockConfigEntry(title="test_title", domain="test") entry.add_to_hass(hass) - async def async_setup_entry(hass, entry): + async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setup entry with a simple coordinator.""" async def _async_update_data(): @@ -4409,12 +4410,12 @@ async def test_unique_id_update_while_setup_in_progress( ) -> None: """Test we handle the case where the config entry is updated while setup is in progress.""" - async def mock_setup_entry(hass, entry): + async def mock_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setting up entry.""" await asyncio.sleep(0.1) return True - async def mock_unload_entry(hass, entry): + async def mock_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock unloading an entry.""" return True @@ -5463,7 +5464,7 @@ async def test_reload_during_setup(hass: HomeAssistant) -> None: in_setup = False setup_calls = 0 - async def mock_async_setup_entry(hass, entry): + async def mock_async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setting up an entry.""" nonlocal in_setup nonlocal setup_calls @@ -6000,3 +6001,44 @@ async def test_migration_from_1_2( ] }, } + + +async def test_async_loaded_entries( + hass: HomeAssistant, manager: 
config_entries.ConfigEntries +) -> None: + """Test that we can get loaded config entries.""" + entry1 = MockConfigEntry(domain="comp") + entry1.add_to_hass(hass) + entry2 = MockConfigEntry(domain="comp", source=config_entries.SOURCE_IGNORE) + entry2.add_to_hass(hass) + entry3 = MockConfigEntry( + domain="comp", disabled_by=config_entries.ConfigEntryDisabler.USER + ) + entry3.add_to_hass(hass) + + mock_setup = AsyncMock(return_value=True) + mock_setup_entry = AsyncMock(return_value=True) + mock_unload_entry = AsyncMock(return_value=True) + + mock_integration( + hass, + MockModule( + "comp", + async_setup=mock_setup, + async_setup_entry=mock_setup_entry, + async_unload_entry=mock_unload_entry, + ), + ) + mock_platform(hass, "comp.config_flow", None) + + assert hass.config_entries.async_loaded_entries("comp") == [] + + assert await manager.async_setup(entry1.entry_id) + assert not await manager.async_setup(entry2.entry_id) + assert not await manager.async_setup(entry3.entry_id) + + assert hass.config_entries.async_loaded_entries("comp") == [entry1] + + assert await hass.config_entries.async_unload(entry1.entry_id) + + assert hass.config_entries.async_loaded_entries("comp") == [] diff --git a/tests/test_core.py b/tests/test_core.py index 8035236fd08..9ca57d1563f 100644 --- a/tests/test_core.py +++ b/tests/test_core.py @@ -424,11 +424,11 @@ async def test_async_get_hass_can_be_called(hass: HomeAssistant) -> None: try: if ha.async_get_hass() is hass: return True - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 except HomeAssistantError: return False - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 # Test scheduling a coroutine which calls async_get_hass via hass.async_create_task async def _async_create_task() -> None: diff --git a/tests/test_data_entry_flow.py b/tests/test_data_entry_flow.py index 967b2565206..01b6a530105 100644 --- a/tests/test_data_entry_flow.py +++ b/tests/test_data_entry_flow.py @@ -1098,3 +1098,27 @@ def test_section_in_serializer() -> None: ], "type": "expandable", } + + +def test_nested_section_in_serializer() -> None: + """Test section with custom_serializer.""" + with pytest.raises( + ValueError, match="Nesting expandable sections is not supported" + ): + cv.custom_serializer( + data_entry_flow.section( + vol.Schema( + { + vol.Required("section_1"): data_entry_flow.section( + vol.Schema( + { + vol.Optional("option_1", default=False): bool, + vol.Required("option_2"): int, + } + ) + ) + } + ), + {"collapsed": False}, + ) + ) diff --git a/tests/test_loader.py b/tests/test_loader.py index ae5280b2dcd..01305dde002 100644 --- a/tests/test_loader.py +++ b/tests/test_loader.py @@ -658,7 +658,9 @@ def _get_test_integration( ) -def _get_test_integration_with_application_credentials(hass, name): +def _get_test_integration_with_application_credentials( + hass: HomeAssistant, name: str +) -> loader.Integration: """Return a generated test integration with application_credentials support.""" return loader.Integration( hass, @@ -678,7 +680,9 @@ def _get_test_integration_with_application_credentials(hass, name): ) -def _get_test_integration_with_zeroconf_matcher(hass, name, config_flow): +def _get_test_integration_with_zeroconf_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a zeroconf matcher.""" return loader.Integration( hass, @@ -697,7 +701,9 @@ def _get_test_integration_with_zeroconf_matcher(hass, name, 
config_flow): ) -def _get_test_integration_with_legacy_zeroconf_matcher(hass, name, config_flow): +def _get_test_integration_with_legacy_zeroconf_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a legacy zeroconf matcher.""" return loader.Integration( hass, @@ -724,7 +730,9 @@ def _get_test_integration_with_legacy_zeroconf_matcher(hass, name, config_flow): ) -def _get_test_integration_with_dhcp_matcher(hass, name, config_flow): +def _get_test_integration_with_dhcp_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a dhcp matcher.""" return loader.Integration( hass, @@ -748,7 +756,9 @@ def _get_test_integration_with_dhcp_matcher(hass, name, config_flow): ) -def _get_test_integration_with_bluetooth_matcher(hass, name, config_flow): +def _get_test_integration_with_bluetooth_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a bluetooth matcher.""" return loader.Integration( hass, @@ -767,7 +777,9 @@ def _get_test_integration_with_bluetooth_matcher(hass, name, config_flow): ) -def _get_test_integration_with_usb_matcher(hass, name, config_flow): +def _get_test_integration_with_usb_matcher( + hass: HomeAssistant, name: str, config_flow: bool +) -> loader.Integration: """Return a generated test integration with a usb matcher.""" return loader.Integration( hass, diff --git a/tests/test_runner.py b/tests/test_runner.py index 141af4f4bc7..c61b8ed5628 100644 --- a/tests/test_runner.py +++ b/tests/test_runner.py @@ -105,7 +105,7 @@ def test_run_does_not_block_forever_with_shielded_task( try: await asyncio.sleep(2) except asyncio.CancelledError: - raise Exception # pylint: disable=broad-exception-raised + raise Exception # noqa: TRY002 async def async_shielded(*_): try: @@ -142,8 +142,7 @@ async def test_unhandled_exception_traceback( async def _unhandled_exception(): raised.set() - # pylint: disable-next=broad-exception-raised - raise Exception("This is unhandled") + raise Exception("This is unhandled") # noqa: TRY002 try: hass.loop.set_debug(True) diff --git a/tests/test_setup.py b/tests/test_setup.py index 4e7c23865da..c50f8392d66 100644 --- a/tests/test_setup.py +++ b/tests/test_setup.py @@ -9,6 +9,7 @@ import pytest import voluptuous as vol from homeassistant import config_entries, loader, setup +from homeassistant.config_entries import ConfigEntry from homeassistant.const import EVENT_COMPONENT_LOADED, EVENT_HOMEASSISTANT_START from homeassistant.core import ( DOMAIN as HOMEASSISTANT_DOMAIN, @@ -23,6 +24,7 @@ from homeassistant.helpers.dispatcher import ( async_dispatcher_send, ) from homeassistant.helpers.issue_registry import IssueRegistry +from homeassistant.helpers.typing import ConfigType from .common import ( MockConfigEntry, @@ -298,9 +300,10 @@ async def test_component_not_setup_twice_if_loaded_during_other_setup( """Test component setup while waiting for lock is not set up twice.""" result = [] - async def async_setup(hass, config): + async def async_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Tracking Setup.""" result.append(1) + return True mock_integration(hass, MockModule("comp", async_setup=async_setup)) @@ -345,9 +348,9 @@ async def test_component_exception_setup(hass: HomeAssistant) -> None: """Test component that raises exception during setup.""" setup.async_set_domains_to_be_loaded(hass, {"comp"}) - def exception_setup(hass, 
config): + def exception_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Raise exception.""" - raise Exception("fail!") # pylint: disable=broad-exception-raised + raise Exception("fail!") # noqa: TRY002 mock_integration(hass, MockModule("comp", setup=exception_setup)) @@ -359,9 +362,9 @@ async def test_component_base_exception_setup(hass: HomeAssistant) -> None: """Test component that raises exception during setup.""" setup.async_set_domains_to_be_loaded(hass, {"comp"}) - def exception_setup(hass, config): + def exception_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Raise exception.""" - raise BaseException("fail!") # pylint: disable=broad-exception-raised + raise BaseException("fail!") # noqa: TRY002 mock_integration(hass, MockModule("comp", setup=exception_setup)) @@ -377,12 +380,11 @@ async def test_component_setup_with_validation_and_dependency( ) -> None: """Test all config is passed to dependencies.""" - def config_check_setup(hass, config): + def config_check_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Test that config is passed in.""" if config.get("comp_a", {}).get("valid", False): return True - # pylint: disable-next=broad-exception-raised - raise Exception(f"Config not passed in: {config}") + raise Exception(f"Config not passed in: {config}") # noqa: TRY002 platform = MockPlatform() @@ -500,7 +502,7 @@ async def test_all_work_done_before_start(hass: HomeAssistant) -> None: """Test all init work done till start.""" call_order = [] - async def component1_setup(hass, config): + async def component1_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" await discovery.async_discover( hass, "test_component2", {}, "test_component2", {} @@ -510,7 +512,7 @@ async def test_all_work_done_before_start(hass: HomeAssistant) -> None: ) return True - def component_track_setup(hass, config): + def component_track_setup(hass: HomeAssistant, config: ConfigType) -> bool: """Set up mock component.""" call_order.append(1) return True @@ -586,7 +588,7 @@ async def test_when_setup_already_loaded(hass: HomeAssistant) -> None: """Test when setup.""" calls = [] - async def mock_callback(hass, component): + async def mock_callback(hass: HomeAssistant, component: str) -> None: """Mock callback.""" calls.append(component) @@ -614,7 +616,7 @@ async def test_async_when_setup_or_start_already_loaded(hass: HomeAssistant) -> """Test when setup or start.""" calls = [] - async def mock_callback(hass, component): + async def mock_callback(hass: HomeAssistant, component: str) -> None: """Mock callback.""" calls.append(component) @@ -660,7 +662,7 @@ async def test_parallel_entry_setup(hass: HomeAssistant, mock_handlers) -> None: calls = [] - async def mock_async_setup_entry(hass, entry): + async def mock_async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool: """Mock setting up an entry.""" calls.append(entry.data["value"]) await asyncio.sleep(0) diff --git a/tests/test_util/aiohttp.py b/tests/test_util/aiohttp.py index d0bd7fbeb2f..04d6db509e0 100644 --- a/tests/test_util/aiohttp.py +++ b/tests/test_util/aiohttp.py @@ -5,6 +5,7 @@ from collections.abc import Iterator from contextlib import contextmanager from http import HTTPStatus import re +from typing import Any from unittest import mock from urllib.parse import parse_qs @@ -19,6 +20,7 @@ from multidict import CIMultiDict from yarl import URL from homeassistant.const import EVENT_HOMEASSISTANT_CLOSE +from homeassistant.core import HomeAssistant from homeassistant.helpers.json 
import json_dumps from homeassistant.util.json import json_loads @@ -37,7 +39,7 @@ def mock_stream(data): class AiohttpClientMocker: """Mock Aiohttp client requests.""" - def __init__(self): + def __init__(self) -> None: """Initialize the request mocker.""" self._mocks = [] self._cookies = {} @@ -174,7 +176,7 @@ class AiohttpClientMockResponse: headers=None, side_effect=None, closing=None, - ): + ) -> None: """Initialize a fake response.""" if json is not None: text = json_dumps(json) @@ -301,7 +303,7 @@ def mock_aiohttp_client() -> Iterator[AiohttpClientMocker]: """Context manager to mock aiohttp client.""" mocker = AiohttpClientMocker() - def create_session(hass, *args, **kwargs): + def create_session(hass: HomeAssistant, *args: Any, **kwargs: Any) -> ClientSession: session = mocker.create_session(hass.loop) async def close_session(event): @@ -327,7 +329,7 @@ class MockLongPollSideEffect: If queue is empty, will await until done. """ - def __init__(self): + def __init__(self) -> None: """Initialize the queue.""" self.semaphore = asyncio.Semaphore(0) self.response_list = [] diff --git a/tests/util/test_async.py b/tests/util/test_async.py index ac927b1375a..cda10b69c3f 100644 --- a/tests/util/test_async.py +++ b/tests/util/test_async.py @@ -14,24 +14,26 @@ from tests.common import extract_stack_to_frame @patch("concurrent.futures.Future") @patch("threading.get_ident") -def test_run_callback_threadsafe_from_inside_event_loop(mock_ident, _) -> None: +def test_run_callback_threadsafe_from_inside_event_loop( + mock_ident: MagicMock, mock_future: MagicMock +) -> None: """Testing calling run_callback_threadsafe from inside an event loop.""" callback = MagicMock() loop = Mock(spec=["call_soon_threadsafe"]) - loop._thread_ident = None + loop._thread_id = None mock_ident.return_value = 5 hasync.run_callback_threadsafe(loop, callback) assert len(loop.call_soon_threadsafe.mock_calls) == 1 - loop._thread_ident = 5 + loop._thread_id = 5 mock_ident.return_value = 5 with pytest.raises(RuntimeError): hasync.run_callback_threadsafe(loop, callback) assert len(loop.call_soon_threadsafe.mock_calls) == 1 - loop._thread_ident = 1 + loop._thread_id = 1 mock_ident.return_value = 5 hasync.run_callback_threadsafe(loop, callback) assert len(loop.call_soon_threadsafe.mock_calls) == 2 @@ -76,7 +78,7 @@ async def test_run_callback_threadsafe(hass: HomeAssistant) -> None: nonlocal it_ran it_ran = True - with patch.dict(hass.loop.__dict__, {"_thread_ident": -1}): + with patch.dict(hass.loop.__dict__, {"_thread_id": -1}): assert hasync.run_callback_threadsafe(hass.loop, callback) assert it_ran is False @@ -96,7 +98,7 @@ async def test_callback_is_always_scheduled(hass: HomeAssistant) -> None: hasync.shutdown_run_callback_threadsafe(hass.loop) with ( - patch.dict(hass.loop.__dict__, {"_thread_ident": -1}), + patch.dict(hass.loop.__dict__, {"_thread_id": -1}), patch.object(hass.loop, "call_soon_threadsafe") as mock_call_soon_threadsafe, pytest.raises(RuntimeError), ): @@ -197,3 +199,17 @@ async def test_create_eager_task_from_thread_in_integration( "from a thread at homeassistant/components/hue/light.py, line 23: " "self.light.is_on" ) in caplog.text + + +async def test_get_scheduled_timer_handles(hass: HomeAssistant) -> None: + """Test get_scheduled_timer_handles returns all scheduled timer handles.""" + loop = hass.loop + timer_handle = loop.call_later(10, lambda: None) + timer_handle2 = loop.call_later(5, lambda: None) + timer_handle3 = loop.call_later(15, lambda: None) + + handles = 
hasync.get_scheduled_timer_handles(loop) + assert set(handles).issuperset({timer_handle, timer_handle2, timer_handle3}) + timer_handle.cancel() + timer_handle2.cancel() + timer_handle3.cancel() diff --git a/tests/util/test_logging.py b/tests/util/test_logging.py index 4667dbcbec8..795444c89bd 100644 --- a/tests/util/test_logging.py +++ b/tests/util/test_logging.py @@ -80,8 +80,7 @@ async def test_async_create_catching_coro( """Test exception logging of wrapped coroutine.""" async def job(): - # pylint: disable-next=broad-exception-raised - raise Exception("This is a bad coroutine") + raise Exception("This is a bad coroutine") # noqa: TRY002 hass.async_create_task(logging_util.async_create_catching_coro(job())) await hass.async_block_till_done() diff --git a/tests/util/yaml/test_init.py b/tests/util/yaml/test_init.py index ece65504ed6..dbd7f1d2e99 100644 --- a/tests/util/yaml/test_init.py +++ b/tests/util/yaml/test_init.py @@ -726,3 +726,44 @@ def test_load_yaml_dict_fail() -> None: """Test item without a key.""" with pytest.raises(yaml_loader.YamlTypeError): yaml_loader.load_yaml_dict(YAML_CONFIG_FILE) + + +@pytest.mark.parametrize( + "tag", + [ + "!include", + "!include_dir_named", + "!include_dir_merge_named", + "!include_dir_list", + "!include_dir_merge_list", + ], +) +@pytest.mark.usefixtures("try_both_loaders") +def test_include_without_parameter(tag: str) -> None: + """Test include extensions without parameters.""" + with ( + io.StringIO(f"key: {tag}") as file, + pytest.raises(HomeAssistantError, match=f"{tag} needs an argument"), + ): + yaml_loader.parse_yaml(file) + + +@pytest.mark.parametrize( + ("open_exception", "load_yaml_exception"), + [ + (FileNotFoundError, OSError), + (NotADirectoryError, HomeAssistantError), + (PermissionError, HomeAssistantError), + ], +) +@pytest.mark.usefixtures("try_both_loaders") +def test_load_yaml_wrap_oserror( + open_exception: Exception, + load_yaml_exception: Exception, +) -> None: + """Test load_yaml wraps OSError in HomeAssistantError.""" + with ( + patch("homeassistant.util.yaml.loader.open", side_effect=open_exception), + pytest.raises(load_yaml_exception), + ): + yaml_loader.load_yaml("bla")
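The new test_async_loaded_entries test above pins down the behaviour of the async_loaded_entries() helper: it returns only the config entries of a domain that are currently in the LOADED state, so ignored, disabled, and unloaded entries are excluded. Below is a minimal sketch of how a caller might lean on that; async_loaded_entries() comes from the test above and async_reload() is the existing ConfigEntries reload API, while the reload-all use case and the helper name async_reload_loaded_entries are illustrative assumptions, not part of this change.

from homeassistant.core import HomeAssistant


async def async_reload_loaded_entries(hass: HomeAssistant, domain: str) -> int:
    """Reload every config entry of a domain that is currently loaded.

    Sketch only: async_loaded_entries() skips ignored, disabled and
    unloaded entries, matching the assertions in test_async_loaded_entries.
    """
    loaded = hass.config_entries.async_loaded_entries(domain)
    for entry in loaded:
        await hass.config_entries.async_reload(entry.entry_id)
    return len(loaded)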
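test_get_scheduled_timer_handles exercises a get_scheduled_timer_handles(loop) utility that returns the timer handles currently scheduled on an event loop. The sketch below shows one way such a helper could be used for debugging pending timers; the import path homeassistant.util.async_ is inferred from the other hasync.* helpers used in tests/util/test_async.py, and the logging wrapper itself is purely illustrative.

import asyncio
import logging

from homeassistant.util.async_ import get_scheduled_timer_handles

_LOGGER = logging.getLogger(__name__)


def log_pending_timers(loop: asyncio.AbstractEventLoop) -> None:
    """Log every timer handle still scheduled on the loop (sketch only)."""
    for handle in get_scheduled_timer_handles(loop):
        if not handle.cancelled():
            _LOGGER.debug("Pending timer handle: %r", handle)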
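Finally, test_include_without_parameter locks in that the YAML include tags reject a missing argument with a HomeAssistantError whose message names the tag. A short sketch of that behaviour, assuming the same imports the test module uses (homeassistant.util.yaml.loader and homeassistant.exceptions); the surrounding try/except is illustrative only.

import io

from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.yaml import loader as yaml_loader

try:
    with io.StringIO("key: !include") as stream:
        yaml_loader.parse_yaml(stream)
except HomeAssistantError as err:
    # Expected message: "!include needs an argument", per the new test.
    print(f"Rejected: {err}")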